refactor: prompts

lencx 2023-05-22 01:59:50 +08:00
parent dd91f0f1f2
commit c53524b472
25 changed files with 744 additions and 545 deletions


@ -140,13 +140,13 @@ sudo xattr -r -d com.apple.quarantine /YOUR_PATH/ChatGPT.app
- `[.chatgpt]` - root directory for the application configuration
- `chat.conf.json` - application preferences
- `chat.awesome.json` - custom URL list, similar to browser bookmarks; any URL can be used as the main window or the tray window (**Control Center -> Awesome**)
- `chat.model.json` - ChatGPT input prompts, filled in quickly via slash commands, made up of three parts:
  - `user_custom` - entered manually (**Control Center -> Language Model -> User Custom**)
  - `sync_prompts` - data synced from [f/awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) (**Control Center -> Language Model -> Sync Prompts**)
  - `sync_custom` - syncs custom JSON or CSV data, from local or remote sources (**Control Center -> Language Model -> Sync Custom**)
- `chat.model.cmd.json` - slash-command data after filtering (enabled or not) and sorting
- `[cache_model]` - cache of synced or manually entered data
<!-- - `chat.awesome.json` - custom URL list, similar to browser bookmarks; any URL can be used as the main window or the tray window (**Control Center -> Awesome**) -->
- `chat.prompt.json` - ChatGPT input prompts, filled in quickly via slash commands, made up of three parts:
  - `user_custom` - entered manually (**Control Center -> Prompts -> User Custom**)
  - `sync_prompts` - data synced from [f/awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) (**Control Center -> Prompts -> Sync Prompts**)
  - `sync_custom` - syncs custom JSON or CSV data, from local or remote sources (**Control Center -> Prompts -> Sync Custom**)
- `chat.prompt.cmd.json` - slash-command data after filtering (enabled or not) and sorting (see the example sketch after this list)
- `[cache_prompts]` - cache of synced or manually entered data
  - `chatgpt_prompts.json` - cached `sync_prompts` data
  - `user_custom.json` - cached `user_custom` data
  - `ae6cf32a6f8541b499d6bfe549dbfca3.json` - randomly generated file name; cached `sync_custom` data
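
For illustration, a minimal sketch of what `chat.prompt.cmd.json` might contain after a sync is shown below. The field names (`cmd`, `act`, `prompt`, `tags`, `enable`) come from the `PromptRecord` struct and the `useChatPrompt` hook introduced in this commit; the concrete values are hypothetical.

```js
// Hypothetical ~/.chatgpt/chat.prompt.cmd.json after syncing prompts.
// Shape: the same { name, last_updated, data } object that useCachePrompt
// writes via writeJSON(CHAT_PROMPT_CMD_JSON, ...).
const chatPromptCmd = {
  name: 'ChatGPT CMD',
  last_updated: 1684689590000, // Date.now() at sync time
  data: [
    {
      cmd: 'linux-terminal', // typed as /linux-terminal in the chat input
      act: 'Linux Terminal',
      prompt: 'I want you to act as a linux terminal ...',
      tags: ['chatgpt-prompts'],
      enable: true,
    },
  ],
};
```

Records with `enable: false` are dropped by the Rust `cmd_list` command before this file is written, so only enabled prompts show up in the slash-command list.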

scripts/chat.js

@ -4,125 +4,122 @@
* @url https://github.com/lencx/ChatGPT/tree/main/scripts/chat.js
*/
async function init() {
new MutationObserver(function (mutationsList) {
for (const mutation of mutationsList) {
if (mutation.target.closest('form')) {
chatBtns();
}
}
}).observe(document.body, {
childList: true,
subtree: true,
});
document.addEventListener('visibilitychange', () =>
document.getElementsByTagName('textarea')[0]?.focus(),
);
}
async function chatBtns() {
const chatConf = (await invoke('get_app_conf')) || {};
const synth = window.speechSynthesis;
let currentUtterance = null;
let currentIndex = -1;
const list = Array.from(document.querySelectorAll('main >div>div>div>div>div'));
list.forEach((i, idx) => {
// if (i.querySelector('.chat-item-copy')) return;
if (i.querySelector('.chat-item-voice')) return;
if (!i.querySelector('button.rounded-md')) return;
if (!i.querySelector('.self-end')) return;
// const cpbtn = i.querySelector('button.rounded-md').cloneNode(true);
// cpbtn.classList.add('chat-item-copy');
// cpbtn.title = 'Copy to clipboard';
// cpbtn.innerHTML = setIcon('copy');
// i.querySelector('.self-end').appendChild(cpbtn);
// cpbtn.onclick = () => {
// copyToClipboard(i?.innerText?.trim() || '', cpbtn);
// }
const saybtn = i.querySelector('button.rounded-md').cloneNode(true);
saybtn.classList.add('chat-item-voice');
saybtn.title = 'Say';
saybtn.innerHTML = setIcon('voice');
i.querySelector('.self-end').appendChild(saybtn);
saybtn.onclick = () => {
if (currentUtterance && currentIndex !== -1) {
synth.cancel();
if (idx === currentIndex) {
saybtn.innerHTML = setIcon('voice');
currentUtterance = null;
currentIndex = -1;
return;
} else if (list[currentIndex].querySelector('.chat-item-voice')) {
list[currentIndex].querySelector('.chat-item-voice').innerHTML = setIcon('voice');
list[idx].querySelector('.chat-item-voice').innerHTML = setIcon('speaking');
}
}
const txt = i?.innerText?.trim() || '';
if (!txt) return;
const utterance = new SpeechSynthesisUtterance(txt);
const voices = speechSynthesis.getVoices();
let voice = voices.find((voice) => voice.voiceURI === chatConf.speech_lang);
if (!voice) {
voice = voices.find((voice) => voice.lang === 'en-US');
}
utterance.voice = voice;
currentIndex = idx;
utterance.lang = voice.lang;
// utterance.rate = 0.7;
// utterance.pitch = 1.1;
// utterance.volume = 1;
synth.speak(utterance);
amISpeaking = synth.speaking;
saybtn.innerHTML = setIcon('speaking');
currentUtterance = utterance;
currentIndex = idx;
utterance.onend = () => {
saybtn.innerHTML = setIcon('voice');
currentUtterance = null;
currentIndex = -1;
};
};
});
}
// function copyToClipboard(text, btn) {
// window.clearTimeout(window.__cpTimeout);
// btn.innerHTML = setIcon('cpok');
// if (navigator.clipboard) {
// navigator.clipboard.writeText(text);
// } else {
// var textarea = document.createElement('textarea');
// document.body.appendChild(textarea);
// textarea.style.position = 'fixed';
// textarea.style.clip = 'rect(0 0 0 0)';
// textarea.style.top = '10px';
// textarea.value = text;
// textarea.select();
// document.execCommand('copy', true);
// document.body.removeChild(textarea);
// }
// window.__cpTimeout = setTimeout(() => {
// btn.innerHTML = setIcon('copy');
// }, 1000);
// }
function focusOnInput() {
// This currently works because there is only a single `<textarea>` element on the ChatGPT UI page.
document.getElementsByTagName('textarea')[0].focus();
}
function setIcon(type) {
return {
var chatInit = (() => {
const ICONS = {
copy: `<svg class="chatappico copy" stroke="currentColor" fill="none" stroke-width="2" viewBox="0 0 24 24" stroke-linecap="round" stroke-linejoin="round" class="h-4 w-4" height="1em" width="1em" xmlns="http://www.w3.org/2000/svg"><path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2"></path><rect x="8" y="2" width="8" height="4" rx="1" ry="1"></rect></svg>`,
cpok: `<svg class="chatappico cpok" viewBox="0 0 24 24"><g fill="none" stroke="#10a37f" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"><rect width="8" height="4" x="8" y="2" rx="1" ry="1"/><path d="M8 4H6a2 2 0 0 0-2 2v14a2 2 0 0 0 2 2h12a2 2 0 0 0 2-2v-2M16 4h2a2 2 0 0 1 2 2v4m1 4H11"/><path d="m15 10l-4 4l4 4"/></g></svg>`,
voice: `<svg class="chatappico voice" viewBox="0 0 1024 1024"><path d="M542.923802 202.113207c-5.110391 0-10.717086 1.186012-16.572444 3.739161L360.043634 312.714188l-83.057671 0c-46.109154 0-83.433224 36.917818-83.433224 83.121116l0 166.646438c0 45.952588 36.950564 83.153862 83.433224 83.153862l83.057671 0 166.307723 106.829074c23.550369 10.218736 41.745776-0.717338 41.745776-23.898293L568.097134 229.687216C568.096111 212.426087 557.753555 202.113207 542.923802 202.113207z" fill="currentColor"></path><path d="M794.154683 314.39548c-16.758686-28.537963-33.771151-48.258097-45.610804-58.882062-3.986801-3.489474-8.972349-5.233188-13.833053-5.233188-5.79396 0-11.464099 2.337231-15.57779 6.91448-7.662517 8.631588-6.976902 21.808702 1.620917 29.410843 1.994424 1.744737 5.856381 5.700839 11.154038 11.777231 9.033747 10.437723 18.006096 22.774703 26.419719 37.072337 24.235984 41.033555 38.755676 89.011266 38.755676 143.688563 0 54.705949-14.519692 102.651938-38.755676 143.810337-8.414647 14.20656-17.448394 26.668383-26.484188 37.07336-5.234211 6.076392-9.096169 10.033517-11.149944 11.778254-8.538467 7.603165-9.224082 20.717857-1.683339 29.40982 7.599072 8.473999 20.807908 9.222035 29.40982 1.650593 11.900028-10.562567 28.910447-30.252001 45.732577-58.850339 27.79095-47.078225 44.490284-102.3122 44.490284-164.872025C838.708412 416.646282 821.946656 361.470635 794.154683 314.39548z" fill="currentColor"></path><path d="M690.846806 377.360534c-8.723685-17.790178-17.698081-30.2827-24.301476-37.260625-4.111644-4.3951-9.595542-6.544043-15.139815-6.544043-5.110391 0-10.159384 1.774413-14.270005 5.54632-8.350179 7.881504-8.847505 20.99722-0.997724 29.471219 3.927449 4.112668 10.468422 13.304004 17.448394 27.199479 11.587919 23.77038 18.567891 51.559283 18.567891 83.370803 0 31.80845-6.978948 59.72322-18.567891 83.400478-6.978948 13.892405-13.520945 23.052019-17.448394 27.259854-7.850805 8.410554-7.353478 21.559015 0.997724 29.440519 8.473999 7.882528 21.559015 7.353478 29.474288-1.025353 6.53995-7.011694 15.513322-19.440771 24.238031-37.356816 14.393825-29.189809 22.992667-63.243393 22.992667-101.781104C713.839473 440.603927 705.241654 406.583089 690.846806 377.360534z" fill="currentColor"></path></svg>`,
speaking: `<svg class="chatappico voice" viewBox="0 0 1024 1024"><path d="M542.923802 202.113207c-5.110391 0-10.717086 1.186012-16.572444 3.739161L360.043634 312.714188l-83.057671 0c-46.109154 0-83.433224 36.917818-83.433224 83.121116l0 166.646438c0 45.952588 36.950564 83.153862 83.433224 83.153862l83.057671 0 166.307723 106.829074c23.550369 10.218736 41.745776-0.717338 41.745776-23.898293L568.097134 229.687216C568.096111 212.426087 557.753555 202.113207 542.923802 202.113207z" fill="#10a37f"></path><path d="M794.154683 314.39548c-16.758686-28.537963-33.771151-48.258097-45.610804-58.882062-3.986801-3.489474-8.972349-5.233188-13.833053-5.233188-5.79396 0-11.464099 2.337231-15.57779 6.91448-7.662517 8.631588-6.976902 21.808702 1.620917 29.410843 1.994424 1.744737 5.856381 5.700839 11.154038 11.777231 9.033747 10.437723 18.006096 22.774703 26.419719 37.072337 24.235984 41.033555 38.755676 89.011266 38.755676 143.688563 0 54.705949-14.519692 102.651938-38.755676 143.810337-8.414647 14.20656-17.448394 26.668383-26.484188 37.07336-5.234211 6.076392-9.096169 10.033517-11.149944 11.778254-8.538467 7.603165-9.224082 20.717857-1.683339 29.40982 7.599072 8.473999 20.807908 9.222035 29.40982 1.650593 11.900028-10.562567 28.910447-30.252001 45.732577-58.850339 27.79095-47.078225 44.490284-102.3122 44.490284-164.872025C838.708412 416.646282 821.946656 361.470635 794.154683 314.39548z" fill="#10a37f"></path><path d="M690.846806 377.360534c-8.723685-17.790178-17.698081-30.2827-24.301476-37.260625-4.111644-4.3951-9.595542-6.544043-15.139815-6.544043-5.110391 0-10.159384 1.774413-14.270005 5.54632-8.350179 7.881504-8.847505 20.99722-0.997724 29.471219 3.927449 4.112668 10.468422 13.304004 17.448394 27.199479 11.587919 23.77038 18.567891 51.559283 18.567891 83.370803 0 31.80845-6.978948 59.72322-18.567891 83.400478-6.978948 13.892405-13.520945 23.052019-17.448394 27.259854-7.850805 8.410554-7.353478 21.559015 0.997724 29.440519 8.473999 7.882528 21.559015 7.353478 29.474288-1.025353 6.53995-7.011694 15.513322-19.440771 24.238031-37.356816 14.393825-29.189809 22.992667-63.243393 22.992667-101.781104C713.839473 440.603927 705.241654 406.583089 690.846806 377.360534z" fill="#10a37f"></path></svg>`,
}[type];
}
};
let currentUtterance = null;
let currentIndex = -1;
let chatConf = {};
async function init() {
chatConf = (await invoke('get_app_conf')) || {};
new MutationObserver(observeMutations).observe(document.body, {
childList: true,
subtree: true,
});
document.addEventListener('visibilitychange', focusOnInput);
}
function observeMutations(mutationsList) {
for (const mutation of mutationsList) {
if (mutation.target.closest('form')) {
addChatButtons();
}
}
}
function focusOnInput() {
const textArea = document.getElementsByTagName('textarea')[0];
if (textArea) {
textArea.focus();
}
}
function addChatButtons() {
const list = Array.from(document.querySelectorAll('main >div>div>div>div>div'));
list.forEach((item, idx) => {
if (shouldSkip(item)) {
return;
}
const saybtn = item.querySelector('button.rounded-md').cloneNode(true);
saybtn.classList.add('chat-item-voice');
saybtn.title = 'Say';
saybtn.innerHTML = ICONS.voice;
item.querySelector('.self-end').appendChild(saybtn);
saybtn.onclick = () => handleClick(item, idx, saybtn);
});
}
function shouldSkip(item) {
return (
item.querySelector('.chat-item-voice') ||
!item.querySelector('button.rounded-md') ||
!item.querySelector('.self-end')
);
}
function handleClick(item, idx, saybtn) {
const synth = window.speechSynthesis;
const list = Array.from(document.querySelectorAll('main >div>div>div>div>div'));
if (currentUtterance && currentIndex !== -1) {
synth.cancel();
if (idx === currentIndex) {
saybtn.innerHTML = ICONS.voice;
currentUtterance = null;
currentIndex = -1;
return;
} else if (list[currentIndex].querySelector('.chat-item-voice')) {
list[currentIndex].querySelector('.chat-item-voice').innerHTML = ICONS.voice;
list[idx].querySelector('.chat-item-voice').innerHTML = ICONS.speaking;
}
}
const txt = item?.innerText?.trim() || '';
if (!txt) return;
const utterance = new SpeechSynthesisUtterance(txt);
const voices = speechSynthesis.getVoices();
let voice = voices.find((voice) => voice.voiceURI === chatConf.speech_lang);
if (!voice) {
voice = voices.find((voice) => voice.lang === 'en-US');
}
utterance.voice = voice;
currentIndex = idx;
utterance.lang = voice.lang;
synth.speak(utterance);
saybtn.innerHTML = ICONS.speaking;
currentUtterance = utterance;
currentIndex = idx;
utterance.onend = () => {
saybtn.innerHTML = ICONS.voice;
currentUtterance = null;
currentIndex = -1;
};
}
return { init };
})();
if (document.readyState === 'complete' || document.readyState === 'interactive') {
init();
chatInit.init();
} else {
document.addEventListener('DOMContentLoaded', init);
document.addEventListener('DOMContentLoaded', chatInit.init);
}
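
The `speech_lang` preference read above is matched against `SpeechSynthesisVoice.voiceURI`. As a rough aid, the browser-console sketch below lists the available voice URIs and speaks a line with the same fallback-to-`en-US` logic as `handleClick`; the function names here are illustrative, not part of the app.

```js
// List voice URIs that could be stored in chat.conf.json as `speech_lang`.
// Note: getVoices() may return [] until the browser has loaded its voices.
function listVoices() {
  return window.speechSynthesis.getVoices().map((v) => ({
    voiceURI: v.voiceURI,
    lang: v.lang,
  }));
}

// Speak text with a preferred voice, falling back to en-US like chat.js does.
function say(text, preferredURI) {
  const voices = window.speechSynthesis.getVoices();
  const voice =
    voices.find((v) => v.voiceURI === preferredURI) ||
    voices.find((v) => v.lang === 'en-US');
  const utterance = new SpeechSynthesisUtterance(text);
  if (voice) {
    utterance.voice = voice;
    utterance.lang = voice.lang;
  }
  window.speechSynthesis.speak(utterance);
}
```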

scripts/cmd.js

@ -9,36 +9,36 @@ function init() {
styleDom.innerHTML = `form {
position: relative;
}
.chat-model-cmd-list {
.chat-prompt-cmd-list {
position: absolute;
bottom: 60px;
max-height: 100px;
overflow: auto;
z-index: 9999;
}
.chat-model-cmd-list>div {
.chat-prompt-cmd-list>div {
border: solid 2px rgba(80,80,80,.3);
border-radius: 5px;
background-color: #fff;
}
html.dark .chat-model-cmd-list>div {
html.dark .chat-prompt-cmd-list>div {
background-color: #4a4a4a;
}
html.dark .chat-model-cmd-list .cmd-item {
html.dark .chat-prompt-cmd-list .cmd-item {
border-color: #666;
}
html.dark .chat-model-cmd-list .cmd-item b {
html.dark .chat-prompt-cmd-list .cmd-item b {
color: #e8e8e8;
}
html.dark .chat-model-cmd-list .cmd-item i {
html.dark .chat-prompt-cmd-list .cmd-item i {
color: #999;
}
html.dark .chat-model-cmd-list .cmd-item.selected {
html.dark .chat-prompt-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.5);
}
.chat-model-cmd-list .cmd-item {
.chat-prompt-cmd-list .cmd-item {
font-size: 12px;
border-bottom: solid 1px rgba(80,80,80,.2);
padding: 2px 4px;
@ -46,13 +46,13 @@ function init() {
user-select: none;
cursor: pointer;
}
.chat-model-cmd-list .cmd-item:last-child {
.chat-prompt-cmd-list .cmd-item:last-child {
border-bottom: none;
}
.chat-model-cmd-list .cmd-item.selected {
.chat-prompt-cmd-list .cmd-item.selected {
background: rgba(59,130,246,.3);
}
.chat-model-cmd-list .cmd-item b {
.chat-prompt-cmd-list .cmd-item b {
display: inline-block;
width: 100px;
overflow: hidden;
@ -62,7 +62,7 @@ function init() {
margin-right: 10px;
color: #2a2a2a;
}
.chat-model-cmd-list .cmd-item i {
.chat-prompt-cmd-list .cmd-item i {
width: 100%;
max-width: 200px;
overflow: hidden;
@ -119,9 +119,9 @@ function init() {
initDom();
cmdTip();
}
if (mutation.target.getAttribute('class') === 'chat-model-cmd-list') {
if (mutation.target.getAttribute('class') === 'chat-prompt-cmd-list') {
// The `chatgpt prompt` fill can be done by clicking on the event.
const searchDom = document.querySelector('form .chat-model-cmd-list>div');
const searchDom = document.querySelector('form .chat-prompt-cmd-list>div');
const searchInput = document.querySelector('form textarea');
if (!searchDom) return;
searchDom.addEventListener('click', (event) => {
@ -144,20 +144,20 @@ function init() {
async function cmdTip() {
initDom();
const chatModelJson = (await invoke('get_chat_model_cmd')) || {};
const data = chatModelJson.data;
const chatPromptJson = (await invoke('get_chat_prompt_cmd')) || {};
const data = chatPromptJson.data;
if (data.length <= 0) return;
let modelDom = document.querySelector('.chat-model-cmd-list');
if (!modelDom) {
let promptDom = document.querySelector('.chat-prompt-cmd-list');
if (!promptDom) {
const dom = document.createElement('div');
dom.classList.add('chat-model-cmd-list');
dom.classList.add('chat-prompt-cmd-list');
document.querySelector('form').appendChild(dom);
modelDom = document.querySelector('.chat-model-cmd-list');
promptDom = document.querySelector('.chat-prompt-cmd-list');
// fix: tray window
if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
modelDom.style.bottom = '54px';
promptDom.style.bottom = '54px';
}
const itemDom = (v) =>
@ -168,16 +168,16 @@ async function cmdTip() {
}</i></div>`;
const renderList = (v) => {
initDom();
modelDom.innerHTML = `<div>${v.map(itemDom).join('')}</div>`;
window.__CHAT_MODEL_CMD_PROMPT__ = v[0]?.prompt.trim();
window.__CHAT_MODEL_CMD__ = v[0]?.cmd.trim();
window.__cmd_list = modelDom.querySelectorAll('.cmd-item');
promptDom.innerHTML = `<div>${v.map(itemDom).join('')}</div>`;
window.__CHAT_CMD_PROMPT__ = v[0]?.prompt.trim();
window.__CHAT_CMD__ = v[0]?.cmd.trim();
window.__cmd_list = promptDom.querySelectorAll('.cmd-item');
window.__cmd_index = 0;
window.__cmd_list[window.__cmd_index].classList.add('selected');
};
const setPrompt = (v = '') => {
if (v.trim()) {
window.__CHAT_MODEL_CMD_PROMPT__ = window.__CHAT_MODEL_CMD_PROMPT__?.replace(
window.__CHAT_CMD_PROMPT__ = window.__CHAT_CMD_PROMPT__?.replace(
/\{([^{}]*)\}/,
`{${v.trim()}}`,
);
@ -188,7 +188,7 @@ async function cmdTip() {
// Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.
// If more than one command appears in the search results, the first one will be used by default.
function cmdKeydown(event) {
if (!window.__CHAT_MODEL_CMD_PROMPT__) {
if (!window.__CHAT_CMD_PROMPT__) {
if (
!event.shiftKey &&
event.keyCode === 13 &&
@ -207,7 +207,7 @@ async function cmdTip() {
window.__cmd_list[window.__cmd_index].classList.remove('selected');
window.__cmd_index = window.__cmd_index - 1;
window.__cmd_list[window.__cmd_index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(
window.__CHAT_CMD_PROMPT__ = decodeURIComponent(
window.__cmd_list[window.__cmd_index].getAttribute('data-prompt'),
);
searchInput.value = `/${window.__cmd_list[window.__cmd_index].getAttribute('data-cmd')}`;
@ -219,70 +219,70 @@ async function cmdTip() {
window.__cmd_list[window.__cmd_index].classList.remove('selected');
window.__cmd_index = window.__cmd_index + 1;
window.__cmd_list[window.__cmd_index].classList.add('selected');
window.__CHAT_MODEL_CMD_PROMPT__ = decodeURIComponent(
window.__CHAT_CMD_PROMPT__ = decodeURIComponent(
window.__cmd_list[window.__cmd_index].getAttribute('data-prompt'),
);
searchInput.value = `/${window.__cmd_list[window.__cmd_index].getAttribute('data-cmd')}`;
event.preventDefault();
}
const containerHeight = modelDom.offsetHeight;
const containerHeight = promptDom.offsetHeight;
const itemHeight = window.__cmd_list[0].offsetHeight + 1;
const itemTop = window.__cmd_list[window.__cmd_index].offsetTop;
const itemBottom = itemTop + itemHeight;
if (itemTop < modelDom.scrollTop || itemBottom > modelDom.scrollTop + containerHeight) {
modelDom.scrollTop = itemTop;
if (itemTop < promptDom.scrollTop || itemBottom > promptDom.scrollTop + containerHeight) {
promptDom.scrollTop = itemTop;
}
// ------------------ TAB key replaces `{q}` tag content -------------------------------
// feat: https://github.com/lencx/ChatGPT/issues/54
if (event.keyCode === 9 && !window.__CHAT_MODEL_STATUS__) {
const strGroup = window.__CHAT_MODEL_CMD_PROMPT__.match(/\{([^{}]*)\}/) || [];
if (event.keyCode === 9 && !window.__CHAT_STATUS__) {
const strGroup = window.__CHAT_CMD_PROMPT__.match(/\{([^{}]*)\}/) || [];
if (strGroup[1]) {
searchInput.value = `/${window.__CHAT_MODEL_CMD__}` + ` {${strGroup[1]}}` + ' |-> ';
window.__CHAT_MODEL_STATUS__ = 1;
searchInput.value = `/${window.__CHAT_CMD__}` + ` {${strGroup[1]}}` + ' |-> ';
window.__CHAT_STATUS__ = 1;
} else {
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
searchInput.value = window.__CHAT_CMD_PROMPT__;
initDom();
}
event.preventDefault();
}
if (window.__CHAT_MODEL_STATUS__ === 1 && event.keyCode === 9) {
if (window.__CHAT_STATUS__ === 1 && event.keyCode === 9) {
// TAB
const data = searchInput.value.split('|->');
if (data[1]?.trim()) {
setPrompt(data[1]);
window.__CHAT_MODEL_STATUS__ = 2;
window.__CHAT_STATUS__ = 2;
}
event.preventDefault();
}
// input text
if (window.__CHAT_MODEL_STATUS__ === 2 && event.keyCode === 9) {
if (window.__CHAT_STATUS__ === 2 && event.keyCode === 9) {
// TAB
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_STATUS__;
searchInput.value = window.__CHAT_CMD_PROMPT__;
promptDom.innerHTML = '';
delete window.__CHAT_STATUS__;
event.preventDefault();
}
// ------------------ type in a space to complete the fill ------------------------------------
if (event.keyCode === 32) {
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
modelDom.innerHTML = '';
delete window.__CHAT_MODEL_CMD_PROMPT__;
searchInput.value = window.__CHAT_CMD_PROMPT__;
promptDom.innerHTML = '';
delete window.__CHAT_CMD_PROMPT__;
}
// ------------------ send --------------------------------------------------------------------
if (event.keyCode === 13 && window.__CHAT_MODEL_CMD_PROMPT__) {
if (event.keyCode === 13 && window.__CHAT_CMD_PROMPT__) {
// Enter
const data = searchInput.value.split('|->');
setPrompt(data[1]);
searchInput.value = window.__CHAT_MODEL_CMD_PROMPT__;
searchInput.value = window.__CHAT_CMD_PROMPT__;
initDom();
@ -297,7 +297,7 @@ async function cmdTip() {
initDom();
}
if (window.__CHAT_MODEL_STATUS__) return;
if (window.__CHAT_STATUS__) return;
const query = searchInput.value;
if (!query || !/^\//.test(query)) {
@ -324,13 +324,13 @@ async function cmdTip() {
}
function initDom() {
const modelDom = document.querySelector('.chat-model-cmd-list');
if (modelDom) {
modelDom.innerHTML = '';
const promptDom = document.querySelector('.chat-prompt-cmd-list');
if (promptDom) {
promptDom.innerHTML = '';
}
delete window.__CHAT_MODEL_CMD_PROMPT__;
delete window.__CHAT_MODEL_CMD__;
delete window.__CHAT_MODEL_STATUS__;
delete window.__CHAT_CMD_PROMPT__;
delete window.__CHAT_CMD__;
delete window.__CHAT_STATUS__;
delete window.__cmd_list;
delete window.__cmd_index;
}
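
To make the `{q}` handling above easier to follow: when a selected prompt contains a `{...}` placeholder, pressing TAB appends the placeholder plus a `|->` marker to the textarea, and whatever is typed after the marker is substituted back into the prompt by `setPrompt`. A standalone sketch of that substitution step (the prompt text is made up):

```js
// The same regex setPrompt() uses to swap the first {...} group in the
// selected prompt for the text typed after the `|->` marker.
const promptTemplate = 'Translate the following text into French: {q}';
const typedAfterMarker = ' bonjour tout le monde ';

const filled = promptTemplate.replace(/\{([^{}]*)\}/, `{${typedAfterMarker.trim()}}`);
console.log(filled);
// => 'Translate the following text into French: {bonjour tout le monde}'
```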

scripts/core.js

@ -4,198 +4,233 @@
* @url https://github.com/lencx/ChatGPT/tree/main/scripts/core.js
*/
const uid = () => window.crypto.getRandomValues(new Uint32Array(1))[0];
function transformCallback(callback = () => {}, once = false) {
const identifier = uid();
const prop = `_${identifier}`;
Object.defineProperty(window, prop, {
value: (result) => {
if (once) {
Reflect.deleteProperty(window, prop);
}
return callback(result);
},
writable: false,
configurable: true,
});
return identifier;
}
async function invoke(cmd, args) {
return new Promise((resolve, reject) => {
if (!window.__TAURI_POST_MESSAGE__) reject('__TAURI_POST_MESSAGE__ does not exist!');
const callback = transformCallback((e) => {
resolve(e);
Reflect.deleteProperty(window, `_${error}`);
}, true);
const error = transformCallback((e) => {
reject(e);
Reflect.deleteProperty(window, `_${callback}`);
}, true);
window.__TAURI_POST_MESSAGE__({
cmd,
callback,
error,
...args,
});
});
}
async function message(message) {
invoke('messageDialog', {
__tauriModule: 'Dialog',
message: {
cmd: 'messageDialog',
message: message.toString(),
title: null,
type: null,
buttonLabel: null,
},
});
}
window.uid = uid;
window.invoke = invoke;
window.message = message;
window.transformCallback = transformCallback;
async function init() {
if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
document.getElementsByTagName('html')[0].style['font-size'] = '70%';
}
async function platform() {
return invoke('platform', {
__tauriModule: 'Os',
message: { cmd: 'platform' },
});
}
if (__TAURI_METADATA__.__currentWindow.label !== 'tray') {
const _platform = await platform();
const chatConf = (await invoke('get_app_conf')) || {};
if (/darwin/.test(_platform) && !chatConf.titlebar) {
const topStyleDom = document.createElement('style');
topStyleDom.innerHTML = `#chatgpt-app-window-top{position:fixed;top:0;z-index:999999999;width:100%;height:24px;background:transparent;cursor:grab;cursor:-webkit-grab;user-select:none;-webkit-user-select:none;}#chatgpt-app-window-top:active {cursor:grabbing;cursor:-webkit-grabbing;}`;
document.head.appendChild(topStyleDom);
const topDom = document.createElement('div');
topDom.id = 'chatgpt-app-window-top';
document.body.appendChild(topDom);
if (window.location.host === 'chat.openai.com') {
const nav = document.body.querySelector('nav');
if (nav) {
const currentPaddingTop = parseInt(
window
.getComputedStyle(document.querySelector('nav'), null)
.getPropertyValue('padding-top')
.replace('px', ''),
10,
);
const navStyleDom = document.createElement('style');
navStyleDom.innerHTML = `nav{padding-top:${
currentPaddingTop + topDom.clientHeight
}px !important}`;
document.head.appendChild(navStyleDom);
var coreInit = (() => {
const uid = () => window.crypto.getRandomValues(new Uint32Array(1))[0];
function transformCallback(callback = () => {}, once = false) {
const identifier = uid();
const prop = `_${identifier}`;
Object.defineProperty(window, prop, {
value: (result) => {
if (once) {
Reflect.deleteProperty(window, prop);
}
return callback(result);
},
writable: false,
configurable: true,
});
return identifier;
}
async function invoke(cmd, args) {
return new Promise((resolve, reject) => {
if (!window.__TAURI_POST_MESSAGE__) reject('__TAURI_POST_MESSAGE__ does not exist!');
const callback = transformCallback((e) => {
resolve(e);
Reflect.deleteProperty(window, `_${error}`);
}, true);
const error = transformCallback((e) => {
reject(e);
Reflect.deleteProperty(window, `_${callback}`);
}, true);
window.__TAURI_POST_MESSAGE__({
cmd,
callback,
error,
...args,
});
});
}
async function message(message) {
invoke('messageDialog', {
__tauriModule: 'Dialog',
message: {
cmd: 'messageDialog',
message: message.toString(),
title: null,
type: null,
buttonLabel: null,
},
});
}
window.uid = uid;
window.invoke = invoke;
window.message = message;
window.transformCallback = transformCallback;
async function init() {
if (__TAURI_METADATA__.__currentWindow.label === 'tray') {
document.getElementsByTagName('html')[0].style['font-size'] = '70%';
}
async function platform() {
return invoke('platform', {
__tauriModule: 'Os',
message: { cmd: 'platform' },
});
}
if (__TAURI_METADATA__.__currentWindow.label !== 'tray') {
const _platform = await platform();
const chatConf = (await invoke('get_app_conf')) || {};
if (/darwin/.test(_platform) && !chatConf.titlebar) {
const topStyleDom = document.createElement('style');
topStyleDom.innerHTML = `#chatgpt-app-window-top{position:fixed;top:0;z-index:999999999;width:100%;height:24px;background:transparent;cursor:grab;cursor:-webkit-grab;user-select:none;-webkit-user-select:none;}#chatgpt-app-window-top:active {cursor:grabbing;cursor:-webkit-grabbing;}`;
document.head.appendChild(topStyleDom);
const topDom = document.createElement('div');
topDom.id = 'chatgpt-app-window-top';
document.body.appendChild(topDom);
if (window.location.host === 'chat.openai.com') {
const nav = document.body.querySelector('nav');
if (nav) {
const currentPaddingTop = parseInt(
window
.getComputedStyle(document.querySelector('nav'), null)
.getPropertyValue('padding-top')
.replace('px', ''),
10,
);
const navStyleDom = document.createElement('style');
navStyleDom.innerHTML = `nav{padding-top:${
currentPaddingTop + topDom.clientHeight
}px !important}`;
document.head.appendChild(navStyleDom);
}
}
topDom.addEventListener('mousedown', () => invoke('drag_window'));
topDom.addEventListener('touchstart', () => invoke('drag_window'));
topDom.addEventListener('dblclick', () => invoke('fullscreen'));
}
topDom.addEventListener('mousedown', () => invoke('drag_window'));
topDom.addEventListener('touchstart', () => invoke('drag_window'));
topDom.addEventListener('dblclick', () => invoke('fullscreen'));
}
}
document.addEventListener('click', (e) => {
const origin = e.target.closest('a');
if (!origin || !origin.target) return;
if (origin && origin.href && origin.target !== '_self') {
invoke('open_link', { url: origin.href });
document.addEventListener('click', (e) => {
const origin = e.target.closest('a');
if (!origin || !origin.target) return;
if (origin && origin.href && origin.target !== '_self') {
invoke('open_link', { url: origin.href });
}
});
// Fix Chinese input method "Enter" on Safari
document.addEventListener(
'keydown',
(e) => {
if (e.keyCode == 229) e.stopPropagation();
},
true,
);
if (window.location.host === 'chat.openai.com') {
window.__sync_prompts = async function () {
await invoke('sync_prompts', { time: Date.now() });
};
}
});
// Fix Chinese input method "Enter" on Safari
document.addEventListener(
'keydown',
(e) => {
if (e.keyCode == 229) e.stopPropagation();
},
true,
);
coreZoom();
if (window.location.host === 'chat.openai.com') {
window.__sync_prompts = async function () {
await invoke('sync_prompts', { time: Date.now() });
};
window.__LoadingMask = LoadingMask;
}
coreZoom();
}
function coreZoom() {
const styleDom = document.createElement('style');
styleDom.innerHTML = `
#ZoomTopTip {
display: none;
position: fixed;
top: 0;
right: 20px;
background: #2a2a2a;
color: #fafafa;
padding: 20px 15px;
border-bottom-left-radius: 5px;
border-bottom-right-radius: 5px;
font-size: 16px;
font-weight: bold;
z-index: 999999;
box-shadow: 0 2px 2px 2px #d8d8d8;
}
.ZoomTopTipAni {
transition: opacity 200ms, display 200ms;
display: none;
opacity: 0;
}
`;
document.head.append(styleDom);
const zoomTipDom = document.createElement('div');
zoomTipDom.id = 'ZoomTopTip';
document.body.appendChild(zoomTipDom);
function zoom(callback) {
if (window.zoomSetTimeout) clearTimeout(window.zoomSetTimeout);
const htmlZoom = window.localStorage.getItem('htmlZoom') || '100%';
const html = document.getElementsByTagName('html')[0];
const zoom = callback(htmlZoom);
html.style.zoom = zoom;
window.localStorage.setItem('htmlZoom', zoom);
zoomTipDom.innerHTML = zoom;
zoomTipDom.style.display = 'block';
zoomTipDom.classList.remove('ZoomTopTipAni');
window.zoomSetTimeout = setTimeout(() => {
zoomTipDom.classList.add('ZoomTopTipAni');
}, 2500);
}
function zoomDefault() {
const htmlZoom = window.localStorage.getItem('htmlZoom');
if (htmlZoom) {
document.getElementsByTagName('html')[0].style.zoom = htmlZoom;
function coreZoom() {
const styleDom = document.createElement('style');
styleDom.innerHTML = `
#ZoomTopTip {
display: none;
position: fixed;
top: 0;
right: 20px;
background: #2a2a2a;
color: #fafafa;
padding: 20px 15px;
border-bottom-left-radius: 5px;
border-bottom-right-radius: 5px;
font-size: 16px;
font-weight: bold;
z-index: 999999;
box-shadow: 0 2px 2px 2px #d8d8d8;
}
.ZoomTopTipAni {
transition: opacity 200ms, display 200ms;
display: none;
opacity: 0;
}
`;
document.head.append(styleDom);
const zoomTipDom = document.createElement('div');
zoomTipDom.id = 'ZoomTopTip';
document.body.appendChild(zoomTipDom);
function zoom(callback) {
if (window.zoomSetTimeout) clearTimeout(window.zoomSetTimeout);
const htmlZoom = window.localStorage.getItem('htmlZoom') || '100%';
const html = document.getElementsByTagName('html')[0];
const zoom = callback(htmlZoom);
html.style.zoom = zoom;
window.localStorage.setItem('htmlZoom', zoom);
zoomTipDom.innerHTML = zoom;
zoomTipDom.style.display = 'block';
zoomTipDom.classList.remove('ZoomTopTipAni');
window.zoomSetTimeout = setTimeout(() => {
zoomTipDom.classList.add('ZoomTopTipAni');
}, 2500);
}
function zoomDefault() {
const htmlZoom = window.localStorage.getItem('htmlZoom');
if (htmlZoom) {
document.getElementsByTagName('html')[0].style.zoom = htmlZoom;
}
}
function zoomIn() {
zoom((htmlZoom) => `${Math.min(parseInt(htmlZoom) + 10, 200)}%`);
}
function zoomOut() {
zoom((htmlZoom) => `${Math.max(parseInt(htmlZoom) - 10, 30)}%`);
}
function zoom0() {
zoom(() => `100%`);
}
zoomDefault();
window.__zoomIn = zoomIn;
window.__zoomOut = zoomOut;
window.__zoom0 = zoom0;
}
function zoomIn() {
zoom((htmlZoom) => `${Math.min(parseInt(htmlZoom) + 10, 200)}%`);
function LoadingMask(text = 'Loading...') {
// Create the overlay element
const loadingOverlay = document.createElement('div');
function startLoading() {
loadingOverlay.style = `
position: fixed;
top: 0;
left: 0;
width: 100%;
height: 100%;
background-color: rgba(0,0,0,.5);
z-index: 9999;
display: flex;
justify-content: center;
align-items: center;`;
loadingOverlay.innerHTML = `<div style="color:#fff;font-weight:bold">${text}</div>`;
document.body.style.overflow = 'hidden';
document.body.appendChild(loadingOverlay);
}
function stopLoading() {
document.body.style.overflow = null;
loadingOverlay.remove();
}
return { startLoading, stopLoading };
}
function zoomOut() {
zoom((htmlZoom) => `${Math.max(parseInt(htmlZoom) - 10, 30)}%`);
}
function zoom0() {
zoom(() => `100%`);
}
zoomDefault();
window.__zoomIn = zoomIn;
window.__zoomOut = zoomOut;
window.__zoom0 = zoom0;
}
return { init };
})();
if (document.readyState === 'complete' || document.readyState === 'interactive') {
init();
coreInit.init();
} else {
document.addEventListener('DOMContentLoaded', init);
document.addEventListener('DOMContentLoaded', coreInit.init);
}
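
Since `core.js` both defines `window.invoke` and is the bridge the other injected scripts rely on, a short sketch of how that helper is used may help. Every command name below appears in this diff; `titlebar` and `speech_lang` are keys the scripts read from `chat.conf.json`, and the return shapes are abbreviated.

```js
// Minimal sketch of calling Tauri commands through the window.invoke helper.
async function demoInvoke() {
  const conf = (await invoke('get_app_conf')) || {};
  console.log(conf.titlebar, conf.speech_lang);

  // chat.prompt.cmd.json as produced by the Rust sync_prompts / cmd_list path.
  const cmds = (await invoke('get_chat_prompt_cmd')) || { data: [] };
  console.log(`${cmds.data.length} slash commands enabled`);

  // Open an external link outside the webview.
  await invoke('open_link', { url: 'https://github.com/lencx/ChatGPT' });
}
```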

scripts/export.js

@ -153,7 +153,7 @@ async function init() {
}
async function exportMarkdown() {
const content = Array.from(document.querySelectorAll('main .items-center>div'))
const content = Array.from(document.querySelectorAll('main div.group'))
.map((i) => {
let j = i.cloneNode(true);
if (/dark\:bg-gray-800/.test(i.getAttribute('class'))) {
@ -168,25 +168,29 @@ async function init() {
await invoke('download_list', { pathname: 'chat.notes.json', filename, id, dir: 'notes' });
}
function downloadThread({ as = Format.PNG } = {}) {
async function downloadThread({ asF = Format.PNG } = {}) {
const { startLoading, stopLoading } = new window.__LoadingMask('Exporting in progress...');
startLoading();
const elements = new Elements();
elements.fixLocation();
await elements.fixLocation();
const pixelRatio = window.devicePixelRatio;
const minRatio = as === Format.PDF ? 2 : 2.5;
const minRatio = asF === Format.PDF ? 2 : 2.5;
window.devicePixelRatio = Math.max(pixelRatio, minRatio);
html2canvas(elements.thread, {
letterRendering: true,
}).then(async function (canvas) {
useCORS: true,
}).then((canvas) => {
elements.restoreLocation();
window.devicePixelRatio = pixelRatio;
const imgData = canvas.toDataURL('image/png');
requestAnimationFrame(() => {
if (as === Format.PDF) {
return handlePdf(imgData, canvas, pixelRatio);
requestAnimationFrame(async () => {
if (asF === Format.PDF) {
await handlePdf(imgData, canvas, pixelRatio);
} else {
handleImg(imgData);
await handleImg(imgData);
}
stopLoading();
});
});
}
@ -219,8 +223,7 @@ async function init() {
this.init();
}
init() {
// this.threadWrapper = document.querySelector(".cdfdFe");
this.spacer = document.querySelector("[class*='h-48'].w-full.flex-shrink-0");
this.spacer = document.querySelector("main div[class*='h-'].flex-shrink-0");
this.thread = document.querySelector(
"[class*='react-scroll-to-bottom']>[class*='react-scroll-to-bottom']>div",
);
@ -232,8 +235,6 @@ async function init() {
// h-full overflow-y-auto
this.positionForm = document.querySelector('form').parentNode;
// this.styledThread = document.querySelector("main");
// this.threadContent = document.querySelector(".gAnhyd");
this.scroller = Array.from(document.querySelectorAll('[class*="react-scroll-to"]')).filter(
(el) => el.classList.contains('h-full'),
)[0];
@ -245,8 +246,9 @@ async function init() {
this.hiddens = Array.from(document.querySelectorAll('.overflow-hidden'));
this.images = Array.from(document.querySelectorAll('img[srcset]'));
this.chatImages = Array.from(document.querySelectorAll('main img[src]'));
}
fixLocation() {
async fixLocation() {
this.hiddens.forEach((el) => {
el.classList.remove('overflow-hidden');
});
@ -261,10 +263,17 @@ async function init() {
img.setAttribute('srcset_old', srcset);
img.setAttribute('srcset', '');
});
//Fix to the text shifting down when generating the canvas
document.body.style.lineHeight = '0.5';
const chatImagePromises = this.chatImages.map(async (img) => {
const src = img.getAttribute('src');
if (!/^http/.test(src)) return;
const data = await invoke('fetch_image', { url: src });
const blob = new Blob([new Uint8Array(data)], { type: 'image/png' });
img.src = URL.createObjectURL(blob);
});
await Promise.all(chatImagePromises);
}
restoreLocation() {
async restoreLocation() {
this.hiddens.forEach((el) => {
el.classList.add('overflow-hidden');
});
@ -274,18 +283,11 @@ async function init() {
this.positionForm.style.display = null;
this.scroller.classList.add('h-full');
this.scroller.style.minHeight = null;
this.images.forEach((img) => {
const srcset = img.getAttribute('srcset_old');
img.setAttribute('srcset', srcset);
img.setAttribute('srcset_old', '');
});
document.body.style.lineHeight = null;
}
}
function setIcon(type) {
return {
// link: `<svg class="chatappico" viewBox="0 0 1024 1024"><path d="M1007.382 379.672L655.374 75.702C624.562 49.092 576 70.694 576 112.03v160.106C254.742 275.814 0 340.2 0 644.652c0 122.882 79.162 244.618 166.666 308.264 27.306 19.862 66.222-5.066 56.154-37.262C132.132 625.628 265.834 548.632 576 544.17V720c0 41.4 48.6 62.906 79.374 36.328l352.008-304c22.142-19.124 22.172-53.506 0-72.656z" p-id="8506" fill="currentColor"></path></svg>`,
png: `<svg class="chatappico" viewBox="0 0 1070 1024"><path d="M981.783273 0H85.224727C38.353455 0 0 35.374545 0 83.083636v844.893091c0 47.616 38.353455 86.574545 85.178182 86.574546h903.633454c46.917818 0 81.733818-38.958545 81.733819-86.574546V83.083636C1070.592 35.374545 1028.701091 0 981.783273 0zM335.825455 135.912727c74.193455 0 134.330182 60.974545 134.330181 136.285091 0 75.170909-60.136727 136.192-134.330181 136.192-74.286545 0-134.516364-61.021091-134.516364-136.192 0-75.264 60.229818-136.285091 134.516364-136.285091z m-161.512728 745.937455a41.890909 41.890909 0 0 1-27.648-10.379637 43.752727 43.752727 0 0 1-4.654545-61.067636l198.097454-255.162182a42.123636 42.123636 0 0 1 57.716364-6.702545l116.549818 128.139636 286.906182-352.814545c14.615273-18.711273 90.251636-106.775273 135.866182-6.935273 0.093091-0.093091 0.093091 112.965818 0.232727 247.761455 0.093091 140.8 0.093091 317.067636 0.093091 317.067636-1.024-0.093091-762.740364 0.093091-763.112727 0.093091z" fill="currentColor"></path></svg>`,
pdf: `<svg class="chatappico pdf" viewBox="0 0 1024 1024"><path d="M821.457602 118.382249H205.725895c-48.378584 0-87.959995 39.583368-87.959996 87.963909v615.731707c0 48.378584 39.581411 87.959995 87.959996 87.959996h615.733664c48.380541 0 87.961952-39.581411 87.961952-87.959996V206.346158c-0.001957-48.378584-39.583368-87.963909-87.963909-87.963909zM493.962468 457.544987c-10.112054 32.545237-21.72487 82.872662-38.806571 124.248336-8.806957 22.378397-8.380404 18.480717-15.001764 32.609808l5.71738-1.851007c58.760658-16.443827 99.901532-20.519564 138.162194-27.561607-7.67796-6.06371-14.350194-10.751884-19.631237-15.586807-26.287817-29.101504-35.464584-34.570387-70.440002-111.862636v0.003913z m288.36767 186.413594c-7.476424 8.356924-20.670227 13.191847-40.019704 13.191847-33.427694 0-63.808858-9.229597-107.79277-31.660824-75.648648 8.356924-156.097 17.214754-201.399704 31.729308-2.199293 0.876587-4.832967 1.759043-7.916674 3.077836-54.536215 93.237125-95.031389 132.767663-130.621199 131.19646-11.286054-0.49895-27.694661-7.044-32.973748-10.11988l-6.52157-6.196764-2.29517-4.353583c-3.07588-7.91863-3.954423-15.395054-2.197337-23.751977 4.838837-23.309771 29.907651-60.251638 82.686779-93.237126 8.356924-6.159587 27.430511-15.897917 45.020944-24.25484 13.311204-21.177004 19.45905-34.744531 36.341171-72.259702 19.102937-45.324228 36.505531-99.492589 47.500041-138.191543v-0.44025c-16.267727-53.219378-25.945401-89.310095-9.67376-147.80856 3.958337-16.71189 18.46702-33.864031 34.748444-33.864031h10.552304c10.115967 0 19.791684 3.520043 26.829814 10.552304 29.029107 29.031064 15.39114 103.824649 0.8805 162.323113-0.8805 2.63563-1.322707 4.832967-1.761 6.153717 17.59239 49.697378 45.400538 98.774492 73.108895 121.647926 11.436717 8.791304 22.638634 18.899444 36.71098 26.814161 19.791684-2.20125 37.517128-4.11487 55.547812-4.11487 54.540128 0 87.525615 9.67963 100.279169 30.351814 4.400543 7.034217 6.595923 15.389184 5.281043 24.1844-0.44025 10.996467-4.39663 21.112434-12.31526 29.031064z m-27.796407-36.748157c-4.394673-4.398587-17.024957-16.936907-78.601259-16.936907-3.073923 0-10.622744-0.784623-14.57521 3.612007 32.104987 14.072347 62.830525 24.757704 83.058545 24.757703 3.083707 0 5.72325-0.442207 8.356923-0.876586h1.759044c2.20125-0.8805 3.520043-1.324663 3.960293-5.71738-0.87463-1.324663-1.757087-3.083707-3.958336-4.838837z m-387.124553 63.041845c-9.237424 5.27713-16.71189 10.112054-21.112433 13.634053-31.226444 28.586901-51.018128 57.616008-53.217422 74.331812 19.789727-6.59788 45.737084-35.626987 74.329855-87.961952v-0.003913z m125.574957-297.822284l2.197336-1.761c3.079793-14.072347 5.232127-29.189554 7.87167-38.869184l1.318794-7.036174c4.39663-25.070771 2.71781-39.720334-4.76057-50.272637l-6.59788-2.20125a57.381208 57.381208 0 0 0-3.079794 5.27713c-7.474467 18.47289-7.063567 55.283661 3.0524 94.865072l-0.001956-0.001957z" fill="currentColor"></path></svg>`,
md: `<svg class="chatappico md" viewBox="0 0 1024 1024" width="200" height="200"><path d="M128 128h768a42.666667 42.666667 0 0 1 42.666667 42.666667v682.666666a42.666667 42.666667 0 0 1-42.666667 42.666667H128a42.666667 42.666667 0 0 1-42.666667-42.666667V170.666667a42.666667 42.666667 0 0 1 42.666667-42.666667z m170.666667 533.333333v-170.666666l85.333333 85.333333 85.333333-85.333333v170.666666h85.333334v-298.666666h-85.333334l-85.333333 85.333333-85.333333-85.333333H213.333333v298.666666h85.333334z m469.333333-128v-170.666666h-85.333333v170.666666h-85.333334l128 128 128-128h-85.333333z" p-id="1381" fill="currentColor"></path></svg>`,


@ -80,3 +80,10 @@ pub async fn get_data(app: AppHandle, url: String, is_msg: Option<bool>) -> Opti
None
})
}
#[tauri::command]
pub async fn fetch_image(url: String) -> Vec<u8> {
let response = reqwest::get(url).await.unwrap();
let bytes = response.bytes().await.unwrap();
bytes.to_vec()
}
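
On the front-end side, the bytes returned by `fetch_image` are turned into an object URL before `html2canvas` runs; the sketch below mirrors the `chatImagePromises` block added to `scripts/export.js` above.

```js
// Sketch of consuming the new fetch_image command from an injected script:
// the Rust Vec<u8> arrives in JS as a plain number array.
async function inlineRemoteImage(img) {
  const src = img.getAttribute('src');
  if (!/^http/.test(src)) return;
  const data = await invoke('fetch_image', { url: src });
  const blob = new Blob([new Uint8Array(data)], { type: 'image/png' });
  img.src = URL.createObjectURL(blob); // html2canvas can now read the pixels
}
```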


@ -10,27 +10,27 @@ use tauri::{api, command, AppHandle, Manager};
use walkdir::WalkDir;
#[command]
pub fn get_chat_model_cmd() -> serde_json::Value {
let path = utils::app_root().join("chat.model.cmd.json");
pub fn get_chat_prompt_cmd() -> serde_json::Value {
let path = utils::app_root().join("chat.prompt.cmd.json");
let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
serde_json::from_str(&content).unwrap()
}
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PromptRecord {
pub struct PromptBaseRecord {
pub cmd: Option<String>,
pub act: String,
pub prompt: String,
}
#[command]
pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
pub fn parse_prompt(data: String) -> Vec<PromptBaseRecord> {
let mut rdr = csv::Reader::from_reader(data.as_bytes());
let mut list = vec![];
for result in rdr.deserialize() {
let record: PromptRecord = result.unwrap_or_else(|err| {
let record: PromptBaseRecord = result.unwrap_or_else(|err| {
error!("parse_prompt: {}", err);
PromptRecord {
PromptBaseRecord {
cmd: None,
act: "".to_string(),
prompt: "".to_string(),
@ -44,7 +44,7 @@ pub fn parse_prompt(data: String) -> Vec<PromptRecord> {
}
#[derive(serde::Serialize, serde::Deserialize, Debug, Clone)]
pub struct ModelRecord {
pub struct PromptRecord {
pub cmd: String,
pub act: String,
pub prompt: String,
@ -53,15 +53,15 @@ pub struct ModelRecord {
}
#[command]
pub fn cmd_list() -> Vec<ModelRecord> {
pub fn cmd_list() -> Vec<PromptRecord> {
let mut list = vec![];
for entry in WalkDir::new(utils::app_root().join("cache_model"))
for entry in WalkDir::new(utils::app_root().join("cache_prompts"))
.into_iter()
.filter_map(|e| e.ok())
{
let file = fs::read_to_string(entry.path().display().to_string());
if let Ok(v) = file {
let data: Vec<ModelRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
let data: Vec<PromptRecord> = serde_json::from_str(&v).unwrap_or_else(|_| vec![]);
let enable_list = data.into_iter().filter(|v| v.enable);
list.extend(enable_list)
}
@ -159,7 +159,7 @@ pub fn download_list(pathname: &str, dir: &str, filename: Option<String>, id: Op
}
#[command]
pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>> {
pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<PromptRecord>> {
let res = utils::get_data(GITHUB_PROMPTS_CSV_URL, Some(&app))
.await
.unwrap();
@ -167,7 +167,7 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>
if let Some(v) = res {
let data = parse_prompt(v)
.iter()
.map(move |i| ModelRecord {
.map(move |i| PromptRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
@ -178,21 +178,21 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>
tags: vec!["chatgpt-prompts".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
.collect::<Vec<PromptRecord>>();
let data2 = data.clone();
let model = utils::app_root().join("chat.model.json");
let model_cmd = utils::app_root().join("chat.model.cmd.json");
let prompts = utils::app_root().join("chat.prompt.json");
let prompt_cmd = utils::app_root().join("chat.prompt.cmd.json");
let chatgpt_prompts = utils::app_root()
.join("cache_model")
.join("cache_prompts")
.join("chatgpt_prompts.json");
if !utils::exists(&model) {
if !utils::exists(&prompts) {
fs::write(
&model,
&prompts,
serde_json::json!({
"name": "ChatGPT Model",
"name": "ChatGPT Prompts",
"link": "https://github.com/lencx/ChatGPT"
})
.to_string(),
@ -208,9 +208,9 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>
.unwrap();
let cmd_data = cmd_list();
// chat.model.cmd.json
// chat.prompt.cmd.json
fs::write(
model_cmd,
prompt_cmd,
serde_json::to_string_pretty(&serde_json::json!({
"name": "ChatGPT CMD",
"last_updated": time,
@ -224,13 +224,17 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>
"sync_prompts".to_string(),
serde_json::json!({ "id": "chatgpt_prompts", "last_updated": time }),
);
let model_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&model).unwrap()).unwrap(),
let prompts_data = utils::merge(
&serde_json::from_str(&fs::read_to_string(&prompts).unwrap()).unwrap(),
&kv,
);
// chat.model.json
fs::write(model, serde_json::to_string_pretty(&model_data).unwrap()).unwrap();
// chat.prompt.json
fs::write(
prompts,
serde_json::to_string_pretty(&prompts_data).unwrap(),
)
.unwrap();
// refresh window
api::dialog::message(
@ -248,7 +252,7 @@ pub async fn sync_prompts(app: AppHandle, time: u64) -> Option<Vec<ModelRecord>>
}
#[command]
pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<ModelRecord>> {
pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<PromptRecord>> {
info!("sync_user_prompts: url => {}", url);
let res = utils::get_data(&url, None).await.unwrap_or_else(|err| {
error!("chatgpt_http: {}", err);
@ -273,7 +277,7 @@ pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<Mod
let data = data
.iter()
.map(move |i| ModelRecord {
.map(move |i| PromptRecord {
cmd: if i.cmd.is_some() {
i.cmd.clone().unwrap()
} else {
@ -284,7 +288,7 @@ pub async fn sync_user_prompts(url: String, data_type: String) -> Option<Vec<Mod
tags: vec!["user-sync".to_string()],
enable: true,
})
.collect::<Vec<ModelRecord>>();
.collect::<Vec<PromptRecord>>();
return Some(data);
}
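
For reference, the data shapes that flow through `parse_prompt` and `sync_user_prompts`: the CSV columns mirror `PromptBaseRecord` (`cmd` optional, `act`, `prompt`), and each parsed row ends up as a `PromptRecord` with `tags` and `enable` filled in. The rows below are invented examples, not entries from the upstream `prompts.csv`.

```js
// Example CSV input accepted by parse_prompt (header row + records).
const csvInput = [
  'cmd,act,prompt',
  'en-translator,English Translator,"I want you to act as an English translator: {q}"',
  ',Linux Terminal,"I want you to act as a linux terminal."',
].join('\n');

// After sync_user_prompts enriches a parsed row, it is PromptRecord-shaped:
const record = {
  cmd: 'en-translator',
  act: 'English Translator',
  prompt: 'I want you to act as an English translator: {q}',
  tags: ['user-sync'],
  enable: true,
};
```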


@ -237,7 +237,7 @@ pub fn menu_handler(event: WindowMenuEvent<tauri::Wry>) {
utils::run_check_update(app, false, None);
}
// Preferences
"control_center" => window::cmd::control_window(app),
"control_center" => window::cmd::control_window(app, "control".into()),
"restart" => tauri::api::process::restart(&app.env()),
"inject_script" => open(&app, &script_path),
"go_conf" => utils::open_file(utils::app_root()),
@ -453,7 +453,7 @@ pub fn tray_handler(handle: &AppHandle, event: SystemTrayEvent) {
}
}
SystemTrayEvent::MenuItemClick { id, .. } => match id.as_str() {
"control_center" => window::cmd::control_window(app),
"control_center" => window::cmd::control_window(app, "control".into()),
"restart" => tauri::api::process::restart(&handle.env()),
"show_dock_icon" => {
AppConf::read()


@ -2,5 +2,6 @@ pub mod cmd;
pub mod fs_extra;
pub mod gpt;
pub mod menu;
pub mod script;
pub mod setup;
pub mod window;


@ -0,0 +1,123 @@
use crate::utils::{app_root, exists};
use log::error;
use log::info;
use regex::Regex;
use serde_json::{from_str, json, Value};
use std::fs;
use tauri::Manager;
use crate::{conf::SCRIPTS_MANIFEST, window};
pub async fn init_script(app: tauri::AppHandle) -> anyhow::Result<(), reqwest::Error> {
let body = reqwest::get(SCRIPTS_MANIFEST).await?.text().await?;
if exist_scripts("manifest.json".into()) {
let compare = compare_nested_json_objects(
&fs::read_to_string(app_root().join("scripts").join("manifest.json")).unwrap(),
&body,
);
if !compare {
info!("update_scripts_manifest");
create_chatgpt_scripts("manifest.json".into(), body);
if let Some(v) = app.get_window("core") {
tauri::api::dialog::ask(Some(&v), "Scripts Update", "There are new available scripts. Please go to 'Preferences -> Control Center -> Scripts' to check for updates.", |is_ok| {
if is_ok {
info!("update_scripts_manifest_ok");
// TODO: goto scripts page
window::cmd::control_window(app, "scripts".into())
} else {
info!("update_scripts_manifest_cancel");
}
});
}
}
} else {
create_chatgpt_scripts("manifest.json".into(), body);
}
Ok(())
}
pub fn parse_script(name: String) -> serde_json::Value {
let code = &fs::read_to_string(name).unwrap();
let re_name = Regex::new(r"@name\s+(.*?)\n").unwrap();
let re_version = Regex::new(r"@version\s+(.*?)\n").unwrap();
let re_url = Regex::new(r"@url\s+(.*?)\n").unwrap();
let mut name = String::new();
let mut version = String::new();
let mut url = String::new();
if let Some(capture) = re_name.captures(code) {
name = capture.get(1).unwrap().as_str().trim().to_owned();
}
if let Some(capture) = re_version.captures(code) {
version = capture.get(1).unwrap().as_str().trim().to_owned();
}
if let Some(capture) = re_url.captures(code) {
url = capture.get(1).unwrap().as_str().trim().to_owned();
}
let json_data = json!({
"name": name,
"version": version,
"url": url,
});
json_data
}
pub fn exist_scripts(file: String) -> bool {
let script_file = app_root().join("scripts").join(file);
exists(&script_file)
}
pub fn create_chatgpt_scripts(file: String, body: String) {
let script_file = app_root().join("scripts").join(file);
info!("script_file: {:?}", script_file);
fs::write(&script_file, body).unwrap();
}
fn compare_nested_json_objects(json1: &str, json2: &str) -> bool {
let value1: Value = from_str(json1).unwrap_or_else(|err| {
error!("json_parse_1_error: {}", err);
json!({})
});
let value2: Value = from_str(json2).unwrap_or_else(|err| {
error!("json_parse_2_error: {}", err);
json!({})
});
compare_json_objects(&value1, &value2)
}
pub fn compare_json_objects(obj1: &Value, obj2: &Value) -> bool {
match (obj1, obj2) {
(Value::Object(obj1), Value::Object(obj2)) => {
if obj1.len() != obj2.len() {
return false;
}
for (key, value) in obj1 {
if !obj2.contains_key(key) || !compare_json_objects(value, obj2.get(key).unwrap()) {
return false;
}
}
true
}
(Value::Array(arr1), Value::Array(arr2)) => {
if arr1.len() != arr2.len() {
return false;
}
for (value1, value2) in arr1.iter().zip(arr2.iter()) {
if !compare_json_objects(value1, value2) {
return false;
}
}
true
}
_ => obj1 == obj2,
}
}
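
`parse_script` above relies on a userscript-style metadata header. The JavaScript sketch below re-creates the same three extractions against an illustrative header; only the `@url` line is taken from the real `scripts/chat.js` header earlier in this diff, while the `@name` and `@version` values are made up.

```js
// Re-creation of the @name/@version/@url extraction done by parse_script().
const scriptSource = `/*
 * @name chat
 * @version 0.1.0
 * @url https://github.com/lencx/ChatGPT/tree/main/scripts/chat.js
 */`;

const meta = {
  name: /@name\s+(.*?)\n/.exec(scriptSource)?.[1].trim(),
  version: /@version\s+(.*?)\n/.exec(scriptSource)?.[1].trim(),
  url: /@url\s+(.*?)\n/.exec(scriptSource)?.[1].trim(),
};
// => { name: 'chat', version: '0.1.0',
//      url: 'https://github.com/lencx/ChatGPT/tree/main/scripts/chat.js' }
```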


@ -1,18 +1,28 @@
use crate::{app::window, conf::AppConf, utils};
use crate::{app, conf::AppConf, utils};
use log::{error, info};
use tauri::{utils::config::WindowUrl, window::WindowBuilder, App, GlobalShortcutManager, Manager};
use wry::application::accelerator::Accelerator;
pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>> {
pub fn init(app: &mut App) -> Result<(), Box<dyn std::error::Error>> {
info!("stepup");
let app_conf = AppConf::read();
let url = app_conf.main_origin.to_string();
let theme = AppConf::theme_mode();
let handle = app.app_handle();
let handle = app.app_handle();
tauri::async_runtime::spawn(async move {
app::script::init_script(handle)
.await
.unwrap_or_else(|err| {
error!("script_init_error: {}", err);
});
});
let handle = app.app_handle();
tauri::async_runtime::spawn(async move {
info!("stepup_tray");
window::tray_window(&handle);
app::window::tray_window(&handle);
});
if let Some(v) = app_conf.clone().global_shortcut {


@ -124,13 +124,13 @@ pub mod cmd {
}
#[tauri::command]
pub fn control_window(handle: tauri::AppHandle) {
pub fn control_window(handle: tauri::AppHandle, win_type: String) {
tauri::async_runtime::spawn(async move {
if handle.get_window("main").is_none() {
WindowBuilder::new(
&handle,
"main",
WindowUrl::App("index.html?type=control".into()),
WindowUrl::App(format!("index.html?type={}", win_type).into()),
)
.title("Control Center")
.resizable(true)


@ -12,9 +12,11 @@ pub const APP_WEBSITE: &str = "https://lencx.github.io/app/";
pub const ISSUES_URL: &str = "https://github.com/lencx/ChatGPT/issues";
pub const NOFWL_APP: &str = "https://github.com/lencx/nofwl";
pub const UPDATE_LOG_URL: &str = "https://github.com/lencx/ChatGPT/blob/main/UPDATE_LOG.md";
pub const BUY_COFFEE: &str = "https://www.buymeacoffee.com/lencx";
// pub const BUY_COFFEE: &str = "https://www.buymeacoffee.com/lencx";
pub const GITHUB_PROMPTS_CSV_URL: &str =
"https://raw.githubusercontent.com/f/awesome-chatgpt-prompts/main/prompts.csv";
pub const SCRIPTS_MANIFEST: &str =
"https://raw.githubusercontent.com/lencx/ChatGPT/main/scripts/manifest.json";
pub const APP_CONF_PATH: &str = "chat.conf.json";
pub const CHATGPT_URL: &str = "https://chat.openai.com";


@ -61,7 +61,8 @@ async fn main() {
cmd::open_file,
cmd::download_file,
cmd::get_data,
gpt::get_chat_model_cmd,
cmd::fetch_image,
gpt::get_chat_prompt_cmd,
gpt::parse_prompt,
gpt::sync_prompts,
gpt::sync_user_prompts,


@ -33,7 +33,9 @@ pub fn create_file(path: &Path) -> Result<File> {
}
pub fn create_chatgpt_prompts() {
let sync_file = app_root().join("cache_model").join("chatgpt_prompts.json");
let sync_file = app_root()
.join("cache_prompts")
.join("chatgpt_prompts.json");
if !exists(&sync_file) {
create_file(&sync_file).unwrap();
fs::write(&sync_file, "[]").unwrap();


@ -1,58 +0,0 @@
import { useState, useEffect } from 'react';
import { clone } from 'lodash';
import { invoke } from '@tauri-apps/api';
import { CHAT_MODEL_JSON, CHAT_MODEL_CMD_JSON, readJSON, writeJSON } from '@/utils';
import useInit from '@/hooks/useInit';
export default function useChatModel(key: string, file = CHAT_MODEL_JSON) {
const [modelJson, setModelJson] = useState<Record<string, any>>({});
useInit(async () => {
const data = await readJSON(file, {
defaultVal: { name: 'ChatGPT Model', [key]: null },
});
setModelJson(data);
});
const modelSet = async (data: Record<string, any>[] | Record<string, any>) => {
const oData = clone(modelJson);
oData[key] = data;
await writeJSON(file, oData);
setModelJson(oData);
};
return { modelJson, modelSet, modelData: modelJson?.[key] || [] };
}
export function useCacheModel(file = '') {
const [modelCacheJson, setModelCacheJson] = useState<Record<string, any>[]>([]);
useEffect(() => {
if (!file) return;
(async () => {
const data = await readJSON(file, { isRoot: true, isList: true });
setModelCacheJson(data);
})();
}, [file]);
const modelCacheSet = async (data: Record<string, any>[], newFile = '') => {
await writeJSON(newFile ? newFile : file, data, { isRoot: true });
setModelCacheJson(data);
await modelCacheCmd();
};
const modelCacheCmd = async () => {
// Generate the `chat.model.cmd.json` file and refresh the page for the slash command to take effect.
const list = await invoke('cmd_list');
await writeJSON(CHAT_MODEL_CMD_JSON, {
name: 'ChatGPT CMD',
last_updated: Date.now(),
data: list,
});
await invoke('window_reload', { label: 'core' });
await invoke('window_reload', { label: 'tray' });
};
return { modelCacheJson, modelCacheSet, modelCacheCmd };
}

src/hooks/useChatPrompt.ts (new file)

@ -0,0 +1,58 @@
import { useState, useEffect } from 'react';
import { clone } from 'lodash';
import { invoke } from '@tauri-apps/api';
import { CHAT_PROMPT_JSON, CHAT_PROMPT_CMD_JSON, readJSON, writeJSON } from '@/utils';
import useInit from '@/hooks/useInit';
export default function useChatPrompt(key: string, file = CHAT_PROMPT_JSON) {
const [promptJson, setPromptJson] = useState<Record<string, any>>({});
useInit(async () => {
const data = await readJSON(file, {
defaultVal: { name: 'ChatGPT Prompts', [key]: null },
});
setPromptJson(data);
});
const promptSet = async (data: Record<string, any>[] | Record<string, any>) => {
const oData = clone(promptJson);
oData[key] = data;
await writeJSON(file, oData);
setPromptJson(oData);
};
return { promptJson, promptSet, promptData: promptJson?.[key] || [] };
}
export function useCachePrompt(file = '') {
const [promptCacheJson, setPromptCacheJson] = useState<Record<string, any>[]>([]);
useEffect(() => {
if (!file) return;
(async () => {
const data = await readJSON(file, { isRoot: true, isList: true });
setPromptCacheJson(data);
})();
}, [file]);
const promptCacheSet = async (data: Record<string, any>[], newFile = '') => {
await writeJSON(newFile ? newFile : file, data, { isRoot: true });
setPromptCacheJson(data);
await promptCacheCmd();
};
const promptCacheCmd = async () => {
// Generate the `chat.prompt.cmd.json` file and refresh the page for the slash command to take effect.
const list = await invoke('cmd_list');
await writeJSON(CHAT_PROMPT_CMD_JSON, {
name: 'ChatGPT CMD',
last_updated: Date.now(),
data: list,
});
await invoke('window_reload', { label: 'core' });
await invoke('window_reload', { label: 'tray' });
};
return { promptCacheJson, promptCacheSet, promptCacheCmd };
}
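
A minimal sketch of a consumer of the new hook; the component is hypothetical and only condenses the pattern the views below follow (useInit, chatRoot and the Tauri path API come from the existing code):

import { useState } from 'react';
import { path } from '@tauri-apps/api';
import useInit from '@/hooks/useInit';
import useChatPrompt, { useCachePrompt } from '@/hooks/useChatPrompt';
import { chatRoot } from '@/utils';

// Hypothetical consumer, not part of this commit. chat.prompt.json keeps
// per-source metadata; the full record list lives under cache_prompts/ and
// saving it regenerates chat.prompt.cmd.json via the `cmd_list` command.
export default function PromptListSketch() {
  const [jsonPath, setJsonPath] = useState('');
  const { promptSet } = useChatPrompt('user_custom');
  const { promptCacheJson, promptCacheSet } = useCachePrompt(jsonPath);

  useInit(async () => {
    setJsonPath(await path.join(await chatRoot(), 'cache_prompts', 'user_custom.json'));
  });

  const save = async (records: Record<string, any>[]) => {
    await promptCacheSet(records); // write cache + rebuild chat.prompt.cmd.json
    promptSet({ id: 'user_custom', last_updated: Date.now() });
  };

  // re-saves the cached records as-is; a real view would edit them first
  return <button onClick={() => save(promptCacheJson)}>Save</button>;
}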

View File

@ -23,8 +23,8 @@ export default function ChatLayout() {
if (location.search === '?type=control') {
go('/settings');
}
if (location.search === '?type=preview') {
go('/?type=preview');
if (location.search === '?type=scripts') {
go('/scripts');
}
setMenuKey(location.pathname);
setDashboard(location.pathname === '/');
@ -88,7 +88,7 @@ export default function ChatLayout() {
theme={appInfo.appTheme === 'dark' ? 'dark' : 'light'}
inlineIndent={12}
items={menuItems}
// defaultOpenKeys={['/model']}
// defaultOpenKeys={['/prompts']}
onClick={(i) => go(i.key)}
/>
</Sider>

8
src/utils.ts vendored
View File

@ -3,8 +3,8 @@ import { homeDir, join, dirname } from '@tauri-apps/api/path';
import dayjs from 'dayjs';
export const APP_CONF_JSON = 'chat.conf.json';
export const CHAT_MODEL_JSON = 'chat.model.json';
export const CHAT_MODEL_CMD_JSON = 'chat.model.cmd.json';
export const CHAT_PROMPT_JSON = 'chat.prompt.json';
export const CHAT_PROMPT_CMD_JSON = 'chat.prompt.cmd.json';
export const CHAT_DOWNLOAD_JSON = 'chat.download.json';
export const CHAT_AWESOME_JSON = 'chat.awesome.json';
export const CHAT_NOTES_JSON = 'chat.notes.json';
@ -23,8 +23,8 @@ export const chatRoot = async () => {
return join(await homeDir(), '.chatgpt');
};
export const chatModelPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_MODEL_JSON);
export const chatPromptPath = async (): Promise<string> => {
return join(await chatRoot(), CHAT_PROMPT_JSON);
};
export const chatPromptsPath = async (): Promise<string> => {
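
A small sketch of the renamed constant and path helper in use; the wrapper function is illustrative, and the readJSON defaultVal mirrors the hook above:

import { CHAT_PROMPT_JSON, chatPromptPath, readJSON } from '@/utils';

// Illustrative only: resolve ~/.chatgpt/chat.prompt.json and read it,
// falling back to an empty shell when the file does not exist yet.
async function loadPromptConf() {
  const absPath = await chatPromptPath(); // ~/.chatgpt/chat.prompt.json
  const conf = await readJSON(CHAT_PROMPT_JSON, {
    defaultVal: { name: 'ChatGPT Prompts', user_custom: null },
  });
  return { absPath, conf };
}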

View File

@ -61,8 +61,11 @@ export default function Dashboard() {
No data
</div>
<div className="txt">
Go to <a onClick={() => invoke('control_window')}>{'Control Center -> Awesome'}</a> to add
data and make sure they are enabled.
Go to{' '}
<a onClick={() => invoke('control_window', { type: 'control' })}>
{'Control Center -> Awesome'}
</a>{' '}
to add data and make sure they are enabled.
</div>
</div>
);

View File

@ -5,8 +5,8 @@ import { invoke, path, fs } from '@tauri-apps/api';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import { TABLE_PAGINATION } from '@/hooks/useTable';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import { CHAT_MODEL_JSON, chatRoot, readJSON, genCmd } from '@/utils';
import useChatPrompt, { useCachePrompt } from '@/hooks/useChatPrompt';
import { CHAT_PROMPT_JSON, chatRoot, readJSON, genCmd } from '@/utils';
import { syncColumns, getPath } from './config';
import SyncForm from './Form';
@ -20,8 +20,8 @@ const fmtData = (data: Record<string, any>[] = []) =>
export default function SyncCustom() {
const [isVisible, setVisible] = useState(false);
const { modelData, modelSet } = useChatModel('sync_custom', CHAT_MODEL_JSON);
const { modelCacheCmd, modelCacheSet } = useCacheModel();
const { promptData, promptSet } = useChatPrompt('sync_custom', CHAT_PROMPT_JSON);
const { promptCacheCmd, promptCacheSet } = useCachePrompt();
const { opData, opInit, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
const formRef = useRef<any>(null);
@ -32,9 +32,9 @@ export default function SyncCustom() {
};
useEffect(() => {
if (modelData.length <= 0) return;
opInit(modelData);
}, [modelData]);
if (promptData.length <= 0) return;
opInit(promptData);
}, [promptData]);
useEffect(() => {
if (!opInfo.opType) return;
@ -47,7 +47,7 @@ export default function SyncCustom() {
...opInfo?.opRecord,
last_updated: Date.now(),
});
modelSet(data);
promptSet(data);
opInfo.resetRecord();
});
}
@ -59,15 +59,15 @@ export default function SyncCustom() {
try {
const file = await path.join(
await chatRoot(),
'cache_model',
'cache_prompts',
`${opInfo?.opRecord?.id}.json`,
);
await fs.removeFile(file);
} catch (e) {}
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelSet(data);
promptSet(data);
opInfo.resetRecord();
modelCacheCmd();
promptCacheCmd();
})();
}
}, [opInfo.opType, formRef]);
@ -75,15 +75,15 @@ export default function SyncCustom() {
const handleSync = async (filename: string) => {
const record = opInfo?.opRecord;
const isJson = /json$/.test(record?.ext);
const file = await path.join(await chatRoot(), 'cache_model', filename);
const file = await path.join(await chatRoot(), 'cache_prompts', filename);
const filePath = await getPath(record);
// https or http
if (/^http/.test(record?.protocol)) {
const data = await invoke('sync_user_prompts', { url: filePath, dataType: record?.ext });
if (data) {
await modelCacheSet(data as [], file);
await modelCacheCmd();
await promptCacheSet(data as [], file);
await promptCacheCmd();
message.success('ChatGPT Prompts data has been synchronized!');
return true;
} else {
@ -95,27 +95,30 @@ export default function SyncCustom() {
if (isJson) {
// parse json
const data = await readJSON(filePath, { isRoot: true });
await modelCacheSet(fmtData(data), file);
await promptCacheSet(fmtData(data), file);
} else {
// parse csv
const data = await fs.readTextFile(filePath);
const list: Record<string, string>[] = await invoke('parse_prompt', { data });
await modelCacheSet(fmtData(list), file);
await promptCacheSet(fmtData(list), file);
}
await modelCacheCmd();
await promptCacheCmd();
return true;
};
const handleOk = () => {
formRef.current?.form?.validateFields().then((vals: Record<string, any>) => {
formRef.current?.form?.validateFields().then(async (vals: Record<string, any>) => {
const file = await readFile(vals?.file?.file?.originFileObj);
vals.file = file;
if (opInfo.opType === 'new') {
const data = opAdd(vals);
modelSet(data);
promptSet(data);
message.success('Data added successfully');
}
if (opInfo.opType === 'edit') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], vals);
modelSet(data);
promptSet(data);
message.success('Data updated successfully');
}
hide();
@ -153,3 +156,12 @@ export default function SyncCustom() {
</div>
);
}
function readFile(file: File) {
return new Promise((resolve, reject) => {
let reader = new FileReader();
reader.onload = (e: any) => resolve(e.target.result);
reader.onerror = (e: any) => reject(e.target.error);
reader.readAsText(file);
});
}
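
handleSync leans on two backend commands: sync_user_prompts for remote sources and parse_prompt for local CSV text. A sketch of calling them directly; the URL is hypothetical, and the act/prompt column layout is assumed from f/awesome-chatgpt-prompts rather than defined by this diff:

import { invoke } from '@tauri-apps/api';

// Illustrative only: remote json/csv sources are fetched and normalized on
// the Rust side; local csv text is split into per-row records by parse_prompt.
async function syncSketch() {
  // hypothetical remote source
  const remote = await invoke('sync_user_prompts', {
    url: 'https://example.com/my_prompts.csv',
    dataType: 'csv',
  });

  // local csv text, assuming the act/prompt column layout
  const csvText = '"act","prompt"\n"Linux Terminal","I want you to act as a linux terminal."';
  const rows = await invoke('parse_prompt', { data: csvText });

  return { remote, rows };
}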

View File

@ -6,7 +6,7 @@ import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import FilePath from '@/components/FilePath';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useChatPrompt, { useCachePrompt } from '@/hooks/useChatPrompt';
import { useTableRowSelection, TABLE_PAGINATION } from '@/hooks/useTable';
import { fmtDate, chatRoot } from '@/utils';
import { syncColumns } from './config';
@ -17,27 +17,27 @@ const promptsURL = 'https://github.com/f/awesome-chatgpt-prompts/blob/main/promp
export default function SyncPrompts() {
const { rowSelection, selectedRowIDs } = useTableRowSelection();
const [jsonPath, setJsonPath] = useState('');
const { modelJson, modelSet } = useChatModel('sync_prompts');
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { promptJson, promptSet } = useChatPrompt('sync_prompts');
const { promptCacheJson, promptCacheSet } = useCachePrompt(jsonPath);
const { opData, opInit, opReplace, opReplaceItems, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
const lastUpdated = modelJson?.sync_prompts?.last_updated;
const lastUpdated = promptJson?.sync_prompts?.last_updated;
const selectedItems = rowSelection.selectedRowKeys || [];
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'cache_model', 'chatgpt_prompts.json'));
setJsonPath(await path.join(await chatRoot(), 'cache_prompts', 'chatgpt_prompts.json'));
});
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
if (promptCacheJson.length <= 0) return;
opInit(promptCacheJson);
}, [promptCacheJson.length]);
const handleSync = async () => {
const data = await invoke('sync_prompts', { time: Date.now() });
if (data) {
opInit(data as any[]);
modelSet({
promptSet({
id: 'chatgpt_prompts',
last_updated: Date.now(),
});
@ -47,13 +47,13 @@ export default function SyncPrompts() {
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
promptCacheSet(data);
}
}, [opInfo.opTime]);
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable });
modelCacheSet(data);
promptCacheSet(data);
};
return (
@ -84,7 +84,7 @@ export default function SyncPrompts() {
<div className="chat-table-tip">
<div className="chat-sync-path">
<FilePath url={promptsURL} content="f/awesome-chatgpt-prompts/prompts.csv" />
<FilePath label="CACHE" paths="cache_model/chatgpt_prompts.json" />
<FilePath label="CACHE" paths="cache_prompts/chatgpt_prompts.json" />
</div>
{lastUpdated && (
<span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>

View File

@ -7,7 +7,7 @@ import { path } from '@tauri-apps/api';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import FilePath from '@/components/FilePath';
import { useCacheModel } from '@/hooks/useChatModel';
import { useCachePrompt } from '@/hooks/useChatPrompt';
import { useTableRowSelection, TABLE_PAGINATION } from '@/hooks/useTable';
import { getPath } from '@/view/prompts/SyncCustom/config';
import { fmtDate, chatRoot } from '@/utils';
@ -21,7 +21,7 @@ export default function SyncRecord() {
const state = location?.state;
const { rowSelection, selectedRowIDs } = useTableRowSelection();
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { promptCacheJson, promptCacheSet } = useCachePrompt(jsonPath);
const { opData, opInit, opReplace, opReplaceItems, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(syncColumns());
@ -29,24 +29,24 @@ export default function SyncRecord() {
useInit(async () => {
setFilePath(await getPath(state));
setJsonPath(await path.join(await chatRoot(), 'cache_model', `${state?.id}.json`));
setJsonPath(await path.join(await chatRoot(), 'cache_prompts', `${state?.id}.json`));
});
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
if (promptCacheJson.length <= 0) return;
opInit(promptCacheJson);
}, [promptCacheJson.length]);
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
promptCacheSet(data);
}
}, [opInfo.opTime]);
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable });
modelCacheSet(data);
promptCacheSet(data);
};
return (
@ -70,7 +70,7 @@ export default function SyncRecord() {
<div className="chat-table-tip">
<div className="chat-sync-path">
<FilePath url={filePath} />
<FilePath label="CACHE" paths={`cache_model/${state?.id}.json`} />
<FilePath label="CACHE" paths={`cache_prompts/${state?.id}.json`} />
</div>
{state?.last_updated && (
<span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>

View File

@ -1,6 +1,6 @@
import { Tag, Switch, Space, Popconfirm, Table } from 'antd';
export const modelColumns = () => [
export const promptColumns = () => [
{
title: '/{cmd}',
dataIndex: 'cmd',
@ -54,7 +54,7 @@ export const modelColumns = () => [
<Space size="middle">
<a onClick={() => actions.setRecord(row, 'edit')}>Edit</a>
<Popconfirm
title="Are you sure to delete this model?"
title="Are you sure you want to delete this prompt?"
onConfirm={() => actions.setRecord(row, 'delete')}
okText="Yes"
cancelText="No"

View File

@ -6,32 +6,32 @@ import useInit from '@/hooks/useInit';
import useData from '@/hooks/useData';
import useColumns from '@/hooks/useColumns';
import FilePath from '@/components/FilePath';
import useChatModel, { useCacheModel } from '@/hooks/useChatModel';
import useChatPrompt, { useCachePrompt } from '@/hooks/useChatPrompt';
import { useTableRowSelection, TABLE_PAGINATION } from '@/hooks/useTable';
import { chatRoot, fmtDate } from '@/utils';
import { modelColumns } from './config';
import { promptColumns } from './config';
import UserCustomForm from './Form';
export default function UserCustom() {
const { rowSelection, selectedRowIDs } = useTableRowSelection();
const [isVisible, setVisible] = useState(false);
const [jsonPath, setJsonPath] = useState('');
const { modelJson, modelSet } = useChatModel('user_custom');
const { modelCacheJson, modelCacheSet } = useCacheModel(jsonPath);
const { promptJson, promptSet } = useChatPrompt('user_custom');
const { promptCacheJson, promptCacheSet } = useCachePrompt(jsonPath);
const { opData, opInit, opReplaceItems, opAdd, opRemove, opReplace, opSafeKey } = useData([]);
const { columns, ...opInfo } = useColumns(modelColumns());
const lastUpdated = modelJson?.user_custom?.last_updated;
const { columns, ...opInfo } = useColumns(promptColumns());
const lastUpdated = promptJson?.user_custom?.last_updated;
const selectedItems = rowSelection.selectedRowKeys || [];
const formRef = useRef<any>(null);
useInit(async () => {
setJsonPath(await path.join(await chatRoot(), 'cache_model', 'user_custom.json'));
setJsonPath(await path.join(await chatRoot(), 'cache_prompts', 'user_custom.json'));
});
useEffect(() => {
if (modelCacheJson.length <= 0) return;
opInit(modelCacheJson);
}, [modelCacheJson.length]);
if (promptCacheJson.length <= 0) return;
opInit(promptCacheJson);
}, [promptCacheJson.length]);
useEffect(() => {
if (!opInfo.opType) return;
@ -40,7 +40,7 @@ export default function UserCustom() {
}
if (['delete'].includes(opInfo.opType)) {
const data = opRemove(opInfo?.opRecord?.[opSafeKey]);
modelCacheSet(data);
promptCacheSet(data);
opInfo.resetRecord();
}
}, [opInfo.opType, formRef]);
@ -48,13 +48,13 @@ export default function UserCustom() {
useEffect(() => {
if (opInfo.opType === 'enable') {
const data = opReplace(opInfo?.opRecord?.[opSafeKey], opInfo?.opRecord);
modelCacheSet(data);
promptCacheSet(data);
}
}, [opInfo.opTime]);
const handleEnable = (isEnable: boolean) => {
const data = opReplaceItems(selectedRowIDs, { enable: isEnable });
modelCacheSet(data);
promptCacheSet(data);
};
const hide = () => {
@ -65,7 +65,7 @@ export default function UserCustom() {
const handleOk = () => {
formRef.current?.form?.validateFields().then(async (vals: Record<string, any>) => {
if (
modelCacheJson.map((i: any) => i.cmd).includes(vals.cmd) &&
promptCacheJson.map((i: any) => i.cmd).includes(vals.cmd) &&
opInfo?.opRecord?.cmd !== vals.cmd
) {
message.warning(
@ -84,9 +84,9 @@ export default function UserCustom() {
default:
break;
}
await modelCacheSet(data);
await promptCacheSet(data);
opInit(data);
modelSet({
promptSet({
id: 'user_custom',
last_updated: Date.now(),
});
@ -115,7 +115,7 @@ export default function UserCustom() {
</div>
</div>
<div className="chat-table-tip">
<FilePath label="CACHE" paths="cache_model/user_custom.json" />
<FilePath label="CACHE" paths="cache_prompts/user_custom.json" />
{lastUpdated && (
<span style={{ marginLeft: 10, color: '#888', fontSize: 12 }}>
Last updated on {fmtDate(lastUpdated)}