Skip to content

Commit

Permalink
Merge pull request #56 from yukiarimo/main
Browse files Browse the repository at this point in the history
merge main into dev
  • Loading branch information
yukiarimo authored Jan 26, 2024
2 parents 0b8eb1f + 26e3346 commit 0e4c808
Show file tree
Hide file tree
Showing 12 changed files with 123 additions and 50 deletions.
6 changes: 2 additions & 4 deletions index.py
Original file line number Diff line number Diff line change
Expand Up @@ -181,9 +181,7 @@ def yuna_server(self):

# send flash message "Hello, {username}!"
flash(f'Hello, {current_user.get_id()}!')

return send_from_directory('.', 'yuna.html')
#return 'Hello, {}!'.format(current_user.get_id())

def handle_image_request(self):
data = request.get_json()
Expand All @@ -196,10 +194,10 @@ def handle_image_request(self):
chat_id = data['chat']

chat_history = self.chat_history_manager.load_chat_history(chat_id)
chat_history.append({"name": "Yuki", "message": prompt})
chat_history.append({"name": self.config['ai']['names'][0], "message": prompt})

created_image = create_image(prompt)
chat_history.append({"name": "Yuna", "message": f"Sure, here you go! <img src='img/art/{created_image}' class='image-message'>"})
chat_history.append({"name": self.config['ai']['names'][1], "message": f"Sure, here you go! <img src='img/art/{created_image}' class='image-message'>"})

self.chat_history_manager.save_chat_history(chat_history, chat_id)
yuna_image_message = f"Sure, here you go! <img src='img/art/{created_image}' class='image-message'>"
Expand Down
17 changes: 14 additions & 3 deletions index.sh
Original file line number Diff line number Diff line change
Expand Up @@ -25,16 +25,18 @@ install_update_dependencies() {
# Hardware-selection menu for dependency installation.
# (A stale "4. Go back" entry conflicted with "4. Metal"; options are now
# 1-3 backends, 4 Metal, 5 back — matching the case dispatch below.)
echo "1. CPU"
echo "2. NVIDIA GPU"
echo "3. AMD GPU"
echo "4. Metal"
echo "5. Go back"

read -p "> " install_choice

# Dispatch on the user's menu choice.
# Fix: the Metal entry was written as "4.)" — a case pattern matching the
# literal string "4.", which never matches input "4", so install_metal was
# unreachable. Stale "4) return" and "between 1 and 4" lines from the old
# menu are also removed.
case $install_choice in
    1) install_cpu;;
    2) install_nvidia;;
    3) install_amd;;
    4) install_metal;;
    5) return;;
    *) echo "Invalid choice. Please enter a number between 1 and 5.";;
esac
done
}
Expand All @@ -61,6 +63,15 @@ install_amd() {

}

# Install the macOS/Apple Silicon dependency set.
# CT_METAL=1 forces pip to build ctransformers from source with Metal
# acceleration enabled (--no-binary skips the prebuilt CPU wheel).
install_metal() {
echo "Installing Metal dependencies..."
CT_METAL=1 pip install ctransformers --no-binary ctransformers
pip install -r requirements-macos.txt
echo "Metal dependencies installed."

}


# Submenu for configure()
configure_submenu() {
while true; do
Expand Down
8 changes: 4 additions & 4 deletions lib/generate.py
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ def generate(self, chat_id, speech=False, text="", template=None, chat_history_m
if name and message:
history += f'{name}: {message}\n'

text_of_history = f"{history}Yuki: {text}\nYuna:"
text_of_history = f"{history}{self.config['ai']['names'][0]}: {text}\{self.config['ai']['names'][1]}:"

tokenized_history = self.model.tokenize(text_of_history)

Expand Down Expand Up @@ -154,10 +154,10 @@ def generate(self, chat_id, speech=False, text="", template=None, chat_history_m
# response = self.clearText(str(response))

if template != "himitsuCopilot" and template != "himitsuCopilotGen" and template != "summary" and template != None:
chat_history.append({"name": "Yuki", "message": text})
chat_history.append({"name": "Yuna", "message": response})
chat_history.append({"name": self.config['ai']['names'][0], "message": text})
chat_history.append({"name": self.config['ai']['names'][1], "message": response})
chat_history_manager.save_chat_history(chat_history, chat_id)

if speech==True:
chat_history_manager.generate_speech(response)
return response
Expand Down
10 changes: 9 additions & 1 deletion lib/history.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,12 @@ def __init__(self, config):
self.config = config

def create_chat_history_file(self, chat_id):
history_starting_template = [{"name": "Yuki", "message": "Hi"}, {"name": "Yuna", "message": "Hello"},{"name": "Yuki", "message": "How are you doing?"}, {"name": "Yuna", "message": "I'm doing great! Thanks for asking!"}]
history_starting_template = [
{"name": self.config['ai']['names'][0], "message": "Hi"},
{"name": self.config['ai']['names'][1], "message": "Hello"},
{"name": self.config['ai']['names'][0], "message": "How are you doing?"},
{"name": self.config['ai']['names'][1], "message": "I'm doing great! Thanks for asking!"}
]
chat_history_json = json.dumps(history_starting_template)
encrypted_chat_history = self.encrypt_data(chat_history_json)
with open(os.path.join(self.config["server"]["history"], chat_id), 'wb') as file:
Expand Down Expand Up @@ -66,6 +71,9 @@ def decrypt_data(self, encrypted_data):

def save_chat_history(self, chat_history, chat):
history_path = os.path.join(self.config["server"]["history"], chat)
#chat_history_json = json.dumps(chat_history)
if isinstance(chat_history, set):
chat_history = list(chat_history)
chat_history_json = json.dumps(chat_history)
encrypted_chat_history = self.encrypt_data(chat_history_json)
with open(history_path, 'wb') as file:
Expand Down
38 changes: 30 additions & 8 deletions menu.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,13 +10,23 @@

def _define_layout() -> ptg.Layout:
    """Build the pytermgui window layout: a 1-row header, a body row with a
    40%-wide right panel, and a 1-row footer.

    Returns:
        ptg.Layout: the configured layout to assign to the window manager.
    """
    layout = ptg.Layout()
    # NOTE: a leftover unnamed add_slot("Body") call from the previous
    # version was removed — it registered an extra body slot *before* the
    # header, shifting every window added with assign=True.
    layout.add_slot(name='Header', height=1)
    layout.add_break()
    layout.add_slot(name='Body')
    layout.add_slot(name='Body right', width=0.4)
    layout.add_break()
    layout.add_slot(name='Footer', height=1)
    return layout

windows = {}
manager = ptg.WindowManager()
title_label = ptg.Label("[210 bold]========== Menu ==========")
header = ptg.Window(
"[210 bold] Yuna Management Script",
box="EMPTY",
)
footer = ptg.Window(ptg.Button("Quit", lambda *_: manager.stop()), box="EMPTY")
layout_ = _define_layout()
manager.layout = layout_

def info(event):
os.system('clear')
Expand All @@ -41,9 +51,10 @@ def install_update_dependencies(event):
ptg.Button("CPU", onclick=install_cpu),
ptg.Button("NVIDIA GPU", onclick=install_nvidia),
ptg.Button("AMD GPU", onclick=install_amd),
ptg.Button("Metal", onclick=install_metal),
ptg.Button("Back", onclick=lambda event: manager.remove(windows['configure_gpu']))
)
manager.add(windows['configure_gpu'])
manager.add(windows['configure_gpu'], assign=True)
manager.focus(windows['configure_gpu'])

def install_cpu(event):
Expand Down Expand Up @@ -71,6 +82,16 @@ def install_amd(event):
manager.remove(windows['configure_gpu'])
manager.focus(main_menu)

def install_metal(event):
    """Install Metal-accelerated dependencies, then return to the main menu.

    Args:
        event: pytermgui button-click event (unused).
    """
    print("Installing Metal dependencies...")
    # Bug fix: the original called
    #   subprocess.check_call("CT_METAL=1", [sys.executable, ...])
    # which passes "CT_METAL=1" as the *command* and the argument list as
    # check_call's second positional parameter — a guaranteed runtime error.
    # CT_METAL is an environment variable; pass it via env= instead.
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install",
         "ctransformers", "--no-binary", "ctransformers"],
        env={**os.environ, "CT_METAL": "1"},
    )
    subprocess.check_call(
        [sys.executable, "-m", "pip", "install", "-r", "requirements-macos.txt"]
    )
    print("Metal dependencies installed!")
    # Close the GPU-selection submenu if it is still open, then refocus.
    if 'configure_gpu' in windows:
        manager.remove(windows['configure_gpu'])
    manager.focus(main_menu)


def configure_submenu(event):
windows['configure_menu'] = ptg.Window(
ptg.Label("[210 bold]========== Install =========="),
Expand All @@ -80,7 +101,7 @@ def configure_submenu(event):
ptg.Button("Restore", onclick=restore),
ptg.Button("Back", onclick=lambda event: manager.remove(windows['configure_menu']))
)
manager.add(windows['configure_menu'])
manager.add(windows['configure_menu'], assign=True)
manager.focus(windows['configure_menu'])

def install_models(event):
Expand All @@ -95,7 +116,7 @@ def install_models(event):
ptg.Button("Back", onclick=lambda event: manager.remove(windows['configure_model']))
)

manager.add(windows['configure_model'])
manager.add(windows['configure_model'], assign=True)
manager.focus(windows['configure_model'])

def install_all_models(event):
Expand Down Expand Up @@ -132,7 +153,7 @@ def clear_models(event):
ptg.Button("Yes", onclick=clear_models_confirm),
ptg.Button("No", onclick=lambda event: manager.remove(windows['clear_models'])),
)
manager.add(windows['clear_models'])
manager.add(windows['clear_models'], assign=True)
manager.focus(windows['clear_models'])

def clear_models_confirm(event):
Expand All @@ -158,7 +179,7 @@ def OneClickInstall(event):
install_models(event)

main_menu = ptg.Window(
title_label,
ptg.Label("[210 bold]========== Menu =========="),
ptg.Button("Start Yuna", onclick=start_yuna),
ptg.Button("Install or Update dependencies", onclick=install_update_dependencies),
ptg.Button("One Click Install", onclick=OneClickInstall),
Expand All @@ -167,6 +188,7 @@ def OneClickInstall(event):
ptg.Button("Exit", onclick=goodbye),
ptg.Button("Info", onclick=info),
)
manager.layout = layout_
manager.add(header)
manager.add(main_menu)
#manager.add(footer)
manager.run()
1 change: 1 addition & 0 deletions requirements-amd.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ html2text
bs4
itsdangerous
cryptography
pytermgui
--extra-index-url https://download.pytorch.org/whl/rocm5.6
torch
torchvision
Expand Down
14 changes: 14 additions & 0 deletions requirements-macos.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
flask
flask_cors
flask_login
openai-whisper
pydub
torch
torchvision
torchaudio
transformers
diffusers
html2text
bs4
itsdangerous
cryptography
1 change: 1 addition & 0 deletions requirements-nvidia.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,7 @@ html2text
bs4
itsdangerous
cryptography
pytermgui
--extra-index-url https://download.pytorch.org/whl/cu118
torch
torchvision
Expand Down
5 changes: 1 addition & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,9 @@ torch
torchvision
torchaudio
transformers
ctransformers
diffusers
html2text
bs4
itsdangerous
cryptography
# NOTE: "CT_METAL=1 pip install ..." is a shell command, not a requirement
# specifier — pip cannot parse it inside requirements.txt. For Metal support
# run it manually instead:
#   CT_METAL=1 pip install ctransformers --no-binary ctransformers
itsdangerous
cryptography
4 changes: 2 additions & 2 deletions static/config.json
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@
"stream": false,
"batch_size": 128,
"threads": -1,
"gpu_layers": 50
"gpu_layers": 0
},
"server": {
"port": "",
Expand All @@ -43,7 +43,7 @@
"art_default_model": "any_loli.safetensors",
"prompts": "static/db/prompts/",
"default_prompt_file": "dialog.txt",
"device": "mps"
"device": "cpu"
},
"security": {
"secret_key": "YourSecretKeyHere123!",
Expand Down
28 changes: 19 additions & 9 deletions static/js/himitsu.js
Original file line number Diff line number Diff line change
@@ -1,4 +1,14 @@
// Class constructor for different prompts
var name1;
var name2;

// Fetch ../../config.json and cache the two AI participant names into the
// module-level globals name1 / name2 (used when building message objects).
// NOTE(review): call sites appear to invoke loadConfig() without awaiting the
// returned promise before reading name1/name2 — presumably a latent race on
// first use; verify against sendGeneratedTextToServer.
async function loadConfig() {
const response = await fetch('../../config.json');
const data = await response.json();
name1 = data.ai.names[0];
name2 = data.ai.names[1];
}


class PromptTemplate {
constructor(fields, templateInputs) {
this.fields = fields;
Expand Down Expand Up @@ -252,9 +262,9 @@ function sendGeneratedTextToServer(generatedText) {
.then((data) => {
messageManager.removeBr();
messageManager.removeTypingBubble();

loadConfig();
const messageData = {
name: 'Yuna',
name: name2,
message: data.response,
};

Expand All @@ -269,9 +279,9 @@ function sendGeneratedTextToServer(generatedText) {
})
.catch((error) => {
messageManager.removeTypingBubble();

loadConfig();
const messageData = {
name: 'Yuna',
name: name2,
message: error,
};

Expand All @@ -282,9 +292,9 @@ function sendGeneratedTextToServer(generatedText) {
} else {
messageManager.removeBr();
messageManager.removeTypingBubble();

loadConfig();
const messageData = {
name: 'Yuna',
name: name2,
message: data.response,
};

Expand All @@ -300,9 +310,9 @@ function sendGeneratedTextToServer(generatedText) {
})
.catch((error) => {
messageManager.removeTypingBubble();

loadConfig();
const messageData = {
name: 'Yuna',
name: name2,
message: error,
};

Expand Down
Loading

0 comments on commit 0e4c808

Please sign in to comment.