feat: chatgpt prompts

This commit is contained in:
lencx 2022-12-17 14:29:46 +08:00
parent 47c9072f40
commit 1e7c0fe02a
10 changed files with 178 additions and 1 deletions

View File

@ -47,6 +47,21 @@ tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true }
~~~
## 📢 Announcement
This is a major and exciting update. It works like `Telegram bot commands`, helping you quickly fill in custom models so that ChatGPT works the way you want. This project has taken up a lot of my spare time; if it helps you, spreading the word or starring it would be a great encouragement. I hope to keep updating it and adding more interesting features.
### How do I use the commands?
You can look through [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) for interesting features to import into the app.
![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png)
After the data import is complete, restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
In the ChatGPT text input area, type a string starting with `/` to bring up the command suggestions. Press the spacebar and the text associated with the command is filled into the input area by default (note: if multiple commands match, only the first one is used as the fill; keep typing until the first suggestion is the one you want before pressing the spacebar, or click one of the suggestions with the mouse). Once the fill is complete, just press Enter.
## ✨ Features
- Multi-platform: `macOS` `Linux` `Windows`

View File

@ -48,6 +48,21 @@ tap repo, "https://github.com/#{repo}.git"
cask "popcorn-time", args: { "no-quarantine": true }
~~~
## 📢 Announcement
This is a major and exciting update. It works like a `Telegram bot command` and helps you quickly populate custom models so that ChatGPT works the way you want. This project has taken up a lot of my spare time, so if it helps you, spreading the word or starring it would be a great encouragement. I hope to keep updating it and adding more interesting features.
### How does it work?
You can look at [awesome-chatgpt-prompts](https://github.com/f/awesome-chatgpt-prompts) to find interesting features to import into the app.
![chat cmd](./assets/chat-cmd-1.png)
![chat cmd](./assets/chat-cmd-2.png)
After the data import is done, you can restart the app to make the configuration take effect (`Menu -> Preferences -> Restart ChatGPT`).
In the ChatGPT text input area, type a string starting with `/` to bring up the command suggestions. Press the spacebar and the text associated with the command is filled into the input area by default (note: if multiple commands match, only the first one is used as the fill; keep typing until the first suggestion is the one you want before pressing the spacebar, or click one of the suggestions with the mouse). Once the fill is complete, just press Enter.
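For reference, the imported commands end up in `chat.model.json` (the file read by the new `get_chat_model` command). A minimal, hypothetical entry might look like the sketch below; the field names mirror what the injected `cmd.js` script reads, while the values here are placeholders rather than real imported data.

~~~json
{
  "data": [
    {
      "cmd": "translator",
      "act": "English Translator",
      "prompt": "I want you to act as an English translator ...",
      "enable": true
    }
  ]
}
~~~

`cmd` is what you type after `/`, `act` is the label shown in the suggestion list, `prompt` is the text filled into the input area, and `enable` toggles whether the command appears at all.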
## ✨ Features
- Multi-platform: `macOS` `Linux` `Windows`

BIN assets/chat-cmd-1.png Normal file (new binary image, 1.9 MiB)

BIN assets/chat-cmd-2.png Normal file (new binary image, 294 KiB)

View File

@ -63,4 +63,11 @@ pub fn form_msg(app: AppHandle, label: &str, title: &str, msg: &str) {
#[command]
pub fn open_file(path: PathBuf) {
utils::open_file(path);
}
#[command]
pub fn get_chat_model() -> serde_json::Value {
let path = utils::chat_root().join("chat.model.json");
let content = fs::read_to_string(path).unwrap_or_else(|_| r#"{"data":[]}"#.to_string());
serde_json::from_str(&content).unwrap_or_else(|_| serde_json::json!({ "data": [] }))
}
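For context, the injected front-end script fetches this data over Tauri's `invoke` bridge (the same call appears in `cmd.js` below). A minimal sketch of the call site, assuming the `{"data":[]}` fallback above:

~~~js
// Sketch: read the chat model exposed by the new `get_chat_model` command.
// `invoke` is the Tauri command bridge already available to the injected scripts,
// and this runs inside an async function, as in cmdTip below.
const chatModel = (await invoke('get_chat_model')) || {};
const commands = chatModel.data || []; // each entry carries cmd, act, prompt, enable
~~~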

View File

@ -19,6 +19,7 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
std::thread::spawn(move || {
#[cfg(target_os = "macos")]
WindowBuilder::new(&app, "core", WindowUrl::App(url.into()))
.title("ChatGPT")
.resizable(true)
.fullscreen(false)
.inner_size(800.0, 600.0)
@ -31,6 +32,7 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
.build().unwrap();
@ -47,6 +49,7 @@ pub fn init(app: &mut App) -> std::result::Result<(), Box<dyn std::error::Error>
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_window)
.build().unwrap();
});

View File

@ -8,6 +8,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
std::thread::spawn(move || {
WindowBuilder::new(&app, "tray", WindowUrl::App(chat_conf.origin.into()))
.title("ChatGPT")
.resizable(false)
.fullscreen(false)
.inner_size(360.0, 540.0)
@ -19,6 +20,7 @@ pub fn tray_window(handle: &tauri::AppHandle) {
.initialization_script(include_str!("../assets/jspdf.js"))
.initialization_script(include_str!("../assets/core.js"))
.initialization_script(include_str!("../assets/export.js"))
.initialization_script(include_str!("../assets/cmd.js"))
.user_agent(&chat_conf.ua_tray)
.build()
.unwrap()

src-tauri/src/assets/cmd.js vendored Normal file (130 lines added)
View File

@ -0,0 +1,130 @@
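// Inject the styles for the command suggestion list, then poll until the chat form exists before wiring up the command tips.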
function init() {
const styleDom = document.createElement('style');
styleDom.innerHTML = `form {
position: relative;
}
.chat-model-cmd-list {
position: absolute;
width: 400px;
bottom: 60px;
max-height: 100px;
overflow: auto;
z-index: 9999;
}
.chat-model-cmd-list>div {
border: solid 2px #d8d8d8;
border-radius: 5px;
background-color: #fff;
}
.chat-model-cmd-list .cmd-item {
font-size: 12px;
border-bottom: solid 1px #888;
padding: 2px 4px;
display: flex;
user-select: none;
cursor: pointer;
}
.chat-model-cmd-list .cmd-item:last-child {
border-bottom: none;
}
.chat-model-cmd-list .cmd-item b {
display: inline-block;
width: 120px;
border-radius: 4px;
margin-right: 10px;
color: #2a2a2a;
}
.chat-model-cmd-list .cmd-item i {
width: 270px;
overflow: hidden;
text-overflow: ellipsis;
white-space: nowrap;
text-align: right;
color: #888;
}`;
document.head.append(styleDom);
if (window.formInterval) {
clearInterval(window.formInterval);
}
window.formInterval = setInterval(() => {
const form = document.querySelector("form");
if (!form) return;
clearInterval(window.formInterval);
cmdTip();
}, 200);
}
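// Load the command list from the Rust side (get_chat_model) and render "/"-triggered suggestions inside the chat form.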
async function cmdTip() {
const chatModelJson = await invoke('get_chat_model') || {};
if (!chatModelJson.data || chatModelJson.data.length <= 0) return;
const data = chatModelJson.data || [];
const modelDom = document.createElement('div');
modelDom.classList.add('chat-model-cmd-list');
document.querySelector('form').appendChild(modelDom);
const itemDom = (v) => `<div class="cmd-item" data-prompt="${encodeURIComponent(v.prompt)}"><b>/${v.cmd}</b><i>${v.act}</i></div>`;
const searchInput = document.querySelector('form textarea');
searchInput.addEventListener('input', debounce(function() {
const query = this.value;
console.log(query);
if (!query || !/^\//.test(query)) {
modelDom.innerHTML = '';
return;
}
const result = data.filter(i => i.enable && new RegExp(query.substring(1)).test(i.cmd));
if (result.length > 0) {
modelDom.innerHTML = `<div>${result.map(itemDom).join('')}</div>`;
}
// Enter a command starting with `/` and press a space to automatically fill `chatgpt prompt`.
// If more than one command appears in the search results, the first one will be used by default.
searchInput.addEventListener('keydown', (event) => {
if (event.keyCode === 32 && result[0]) {
searchInput.value = result[0].prompt.trim();
}
if (event.keyCode === 13) {
modelDom.innerHTML = '';
}
});
}, 250),
{
capture: false,
passive: true,
once: false
});
if (window.searchInterval) {
clearInterval(window.searchInterval);
}
window.searchInterval = setInterval(() => {
// The `chatgpt prompt` fill can be done by clicking on the event.
const searchDom = document.querySelector("form .chat-model-cmd-list>div");
if (!searchDom) return;
searchDom.addEventListener('click', (event) => {
// find the clicked .cmd-item row
const item = event.target.closest(".cmd-item");
if (item) {
document.querySelector('form textarea').value = decodeURIComponent(item.getAttribute('data-prompt'));
document.querySelector('form textarea').focus();
}
});
}, 200);
}
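// Trailing-edge debounce: run fn only after delay ms have passed without another call.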
function debounce(fn, delay) {
let timeoutId;
return function(...args) {
clearTimeout(timeoutId);
timeoutId = setTimeout(() => fn.apply(this, args), delay);
};
}
if (
document.readyState === "complete" ||
document.readyState === "interactive"
) {
init();
} else {
document.addEventListener("DOMContentLoaded", init);
}

View File

@ -27,6 +27,7 @@ fn main() {
cmd::form_confirm,
cmd::form_msg,
cmd::open_file,
cmd::get_chat_model,
])
.setup(setup::init)
.plugin(tauri_plugin_positioner::init())

View File

@ -1,5 +1,5 @@
import { useState, useRef, useEffect } from 'react';
import { Table, Button, Modal } from 'antd';
import { Table, Button, Modal, message } from 'antd';
import { invoke } from '@tauri-apps/api';
import useChatModel from '@/hooks/useChatModel';
@ -38,6 +38,10 @@ export default function LanguageModel() {
const handleOk = () => {
formRef.current?.form?.validateFields()
.then((vals: Record<string, any>) => {
if (modelData.map((i: any) => i.cmd).includes(vals.cmd) && opInfo?.opRecord?.cmd !== vals.cmd) {
message.warning(`"cmd: /${vals.cmd}" already exists, please change the command name and resubmit.`);
return;
}
let data = [];
switch (opInfo.opType) {
case 'new': data = opAdd(vals); break;