diff --git a/com.jeffser.Alpaca.json b/com.jeffser.Alpaca.json
index 739646cd..f5d51d42 100644
--- a/com.jeffser.Alpaca.json
+++ b/com.jeffser.Alpaca.json
@@ -65,7 +65,7 @@
             {
                 "type" : "git",
                 "url" : "https://github.com/Jeffser/Alpaca.git",
-                "tag": "0.2.1"
+                "tag": "0.2.2"
             }
         ]
     }
diff --git a/data/com.jeffser.Alpaca.metainfo.xml.in b/data/com.jeffser.Alpaca.metainfo.xml.in
index 4f9e507a..3348f154 100644
--- a/data/com.jeffser.Alpaca.metainfo.xml.in
+++ b/data/com.jeffser.Alpaca.metainfo.xml.in
@@ -51,6 +51,20 @@
 
     https://github.com/Jeffser/Alpaca
     https://github.com/sponsors/Jeffser
+
+      https://github.com/Jeffser/Alpaca/releases/tag/0.2.2
+
+        0.2.2 Bug fixes
+
+          Toast messages appearing behind dialogs
+          Local model list not updating when changing servers
+          Closing the setup dialog closes the whole app
+
+
+        Please report any errors to the issues page, thank you.
+
+
+
 
       https://github.com/Jeffser/Alpaca/releases/tag/0.2.1
 
diff --git a/meson.build b/meson.build
index fb3b3203..24ff9395 100644
--- a/meson.build
+++ b/meson.build
@@ -1,5 +1,5 @@
 project('Alpaca',
-          version: '0.2.1',
+          version: '0.2.2',
   meson_version: '>= 0.62.0',
   default_options: [ 'warning_level=2', 'werror=false', ],
 )
diff --git a/src/connection_handler.py b/src/connection_handler.py
index f976957e..7184fc3a 100644
--- a/src/connection_handler.py
+++ b/src/connection_handler.py
@@ -7,9 +7,9 @@ def simple_get(connection_url:str) -> dict:
         if response.status_code == 200:
             return {"status": "ok", "text": response.text, "status_code": response.status_code}
         else:
-            return {"status": "error", "text": f"Failed to connect to {connection_url}. Status code: {response.status_code}", "status_code": response.status_code}
+            return {"status": "error", "status_code": response.status_code}
     except Exception as e:
-        return {"status": "error", "text": f"An error occurred while trying to connect to {connection_url}", "status_code": 0}
+        return {"status": "error", "status_code": 0}
 
 def simple_delete(connection_url:str, data) -> dict:
     try:
@@ -19,7 +19,7 @@ def simple_delete(connection_url:str, data) -> dict:
         else:
             return {"status": "error", "text": "Failed to delete", "status_code": response.status_code}
     except Exception as e:
-        return {"status": "error", "text": f"An error occurred while trying to connect to {connection_url}", "status_code": 0}
+        return {"status": "error", "status_code": 0}
 
 def stream_post(connection_url:str, data, callback:callable) -> dict:
     try:
@@ -31,11 +31,11 @@ def stream_post(connection_url:str, data, callback:callable) -> dict:
             for line in response.iter_lines():
                 if line:
                     callback(json.loads(line.decode("utf-8")))
-            return {"status": "ok", "text": "All good", "status_code": response.status_code}
+            return {"status": "ok", "status_code": response.status_code}
         else:
-            return {"status": "error", "text": "Error posting data", "status_code": response.status_code}
+            return {"status": "error", "status_code": response.status_code}
     except Exception as e:
-        return {"status": "error", "text": f"An error occurred while trying to connect to {connection_url}", "status_code": 0}
+        return {"status": "error", "status_code": 0}
 
 from time import sleep
 
@@ -58,4 +58,4 @@ def stream_post_fake(connection_url:str, data, callback:callable) -> dict:
             sleep(.1)
             data = {"status": msg}
             callback(data)
-    return {"status": "ok", "text": "All good", "status_code": 200}
+    return {"status": "ok", "status_code": 200}
diff --git a/src/main.py b/src/main.py
index 66ed1e82..51b7d03d 100644
--- a/src/main.py
+++ b/src/main.py
@@ -48,7 +48,7 @@ def on_about_action(self, widget, _):
             application_name='Alpaca',
             application_icon='com.jeffser.Alpaca',
             developer_name='Jeffry Samuel Eduarte Rojas',
-            version='0.2.1',
+            version='0.2.2',
             developers=['Jeffser https://jeffser.com'],
             designers=['Jeffser https://jeffser.com'],
             copyright='© 2024 Jeffser',
diff --git a/src/window.py b/src/window.py
index 6e143f37..4e73c248 100644
--- a/src/window.py
+++ b/src/window.py
@@ -43,7 +43,10 @@ class AlpacaWindow(Adw.ApplicationWindow):
     connection_previous_button = Gtk.Template.Child()
     connection_next_button = Gtk.Template.Child()
     connection_url_entry = Gtk.Template.Child()
-    overlay = Gtk.Template.Child()
+    main_overlay = Gtk.Template.Child()
+    pull_overlay = Gtk.Template.Child()
+    manage_models_overlay = Gtk.Template.Child()
+    connection_overlay = Gtk.Template.Child()
     chat_container = Gtk.Template.Child()
     chat_window = Gtk.Template.Child()
     message_entry = Gtk.Template.Child()
@@ -59,12 +62,33 @@
     pull_model_status_page = Gtk.Template.Child()
     pull_model_progress_bar = Gtk.Template.Child()
 
-    def show_toast(self, msg:str):
+    toast_messages = {
+        "error": [
+            "An error occurred",
+            "Failed to connect to server",
+            "Could not list local models",
+            "Could not delete model",
+            "Could not pull model"
+        ],
+        "info": [
+            "Please select a model before chatting",
+            "Conversation cannot be cleared while receiving a message"
+        ],
+        "good": [
+            "Model deleted successfully",
+            "Model pulled successfully"
+        ]
+    }
+
+    def show_toast(self, message_type:str, message_id:int, overlay):
+        if message_type not in self.toast_messages or message_id >= len(self.toast_messages[message_type]) or message_id < 0:
+            message_type = "error"
+            message_id = 0
         toast = Adw.Toast(
-            title=msg,
+            title=self.toast_messages[message_type][message_id],
             timeout=2
         )
-        self.overlay.add_toast(toast)
+        overlay.add_toast(toast)
 
     def show_message(self, msg:str, bot:bool, footer:str=None):
         message_text = Gtk.TextView(
@@ -92,6 +116,8 @@ def show_message(self, msg:str, bot:bool, footer:str=None):
     def update_list_local_models(self):
         self.local_models = []
         response = simple_get(self.ollama_url + "/api/tags")
+        for i in range(self.model_string_list.get_n_items() -1, -1, -1):
+            self.model_string_list.remove(i)
         if response['status'] == 'ok':
             for model in json.loads(response['text'])['models']:
                 self.model_string_list.append(model["name"])
@@ -99,8 +125,8 @@
             self.model_drop_down.set_selected(0)
             return
         else:
-            self.show_toast(response['text'])
             self.show_connection_dialog(True)
+            self.show_toast("error", 2, self.connection_overlay)
 
     def verify_connection(self):
         response = simple_get(self.ollama_url)
@@ -110,18 +136,8 @@
             self.message_entry.grab_focus_without_selecting()
             self.update_list_local_models()
             return True
-        else:
-            response = {"status": "error", "text": f"Unexpected response from {self.ollama_url} : {response['text']}"}
-            self.show_toast(response['text'])
         return False
 
-    def dialog_response(self, dialog, task):
-        self.ollama_url = dialog.get_extra_child().get_text()
-        if dialog.choose_finish(task) == "login":
-            self.verify_connection()
-        else:
-            self.destroy()
-
     def update_bot_message(self, data):
         if data['done']:
             formated_datetime = datetime.now().strftime("%Y/%m/%d %H:%M")
@@ -147,7 +163,7 @@ def update_bot_message(self, data):
     def send_message(self):
         current_model = self.model_drop_down.get_selected_item()
         if current_model is None:
-            GLib.idle_add(self.show_toast, "Please pull a model")
+            GLib.idle_add(self.show_toast, "info", 0, self.main_overlay)
             return
         formated_datetime = datetime.now().strftime("%Y/%m/%d %H:%M")
         self.chats["chats"][self.current_chat_id]["messages"].append({
@@ -169,8 +185,8 @@
         GLib.idle_add(self.send_button.set_sensitive, True)
         GLib.idle_add(self.message_entry.set_sensitive, True)
         if response['status'] == 'error':
-            self.show_toast(f"{response['text']}")
-            self.show_connection_dialog(True)
+            GLib.idle_add(self.show_toast, 'error', 1, self.connection_overlay)
+            GLib.idle_add(self.show_connection_dialog, True)
 
     def send_button_activate(self, button):
         if not self.message_entry.get_text(): return
@@ -180,20 +196,17 @@
     def delete_model(self, dialog, task, model_name, button):
         if dialog.choose_finish(task) == "delete":
             response = simple_delete(self.ollama_url + "/api/delete", data={"name": model_name})
-            print(response)
             if response['status'] == 'ok':
                 button.set_icon_name("folder-download-symbolic")
                 button.set_css_classes(["accent", "pull"])
-                self.show_toast(f"Model '{model_name}' deleted successfully")
+                self.show_toast("good", 0, self.manage_models_overlay)
                 for i in range(self.model_string_list.get_n_items()):
                     if self.model_string_list.get_string(i) == model_name:
                         self.model_string_list.remove(i)
                         self.model_drop_down.set_selected(0)
                         break
-            elif response['status_code'] == '404':
-                self.show_toast(f"Delete request failed: Model was not found")
             else:
-                self.show_toast(response['text'])
+                self.show_toast("error", 3, self.connection_overlay)
                 self.manage_models_dialog.close()
                 self.show_connection_dialog(True)
 
@@ -218,11 +231,12 @@ def pull_model(self, dialog, task, model_name, button):
             GLib.idle_add(button.set_icon_name, "user-trash-symbolic")
             GLib.idle_add(button.set_css_classes, ["error", "delete"])
             GLib.idle_add(self.model_string_list.append, model_name)
-            GLib.idle_add(self.show_toast, f"Model '{model_name}' pulled successfully")
+            GLib.idle_add(self.show_toast, "good", 1, self.manage_models_overlay)
         else:
-            GLib.idle_add(self.show_toast, response['text'])
+            GLib.idle_add(self.show_toast, "error", 4, self.connection_overlay)
             GLib.idle_add(self.manage_models_dialog.close)
             GLib.idle_add(self.show_connection_dialog, True)
+            print("pull fail")
 
 
     def pull_model_start(self, dialog, task, model_name, button):
@@ -265,9 +279,11 @@ def update_list_available_models(self):
             self.model_list_box.append(model)
 
     def manage_models_button_activate(self, button):
+        self.manage_models_dialog.present(self)
         self.update_list_available_models()
 
+
     def connection_carousel_page_changed(self, carousel, index):
         if index == 0: self.connection_previous_button.set_sensitive(False)
         else: self.connection_previous_button.set_sensitive(True)
@@ -284,10 +300,12 @@ def connection_next_button_activate(self, button):
         if self.verify_connection():
             self.connection_dialog.force_close()
         else:
-            show_connection_dialog(True)
+            self.show_connection_dialog(True)
+            self.show_toast("error", 1, self.connection_overlay)
 
     def show_connection_dialog(self, error:bool=False):
         self.connection_carousel.scroll_to(self.connection_carousel.get_nth_page(self.connection_carousel.get_n_pages()-1),False)
+        if self.ollama_url is not None: self.connection_url_entry.set_text(self.ollama_url)
         if error: self.connection_url_entry.set_css_classes(["error"])
         else: self.connection_url_entry.set_css_classes([])
         self.connection_dialog.present(self)
@@ -303,7 +321,7 @@ def clear_conversation_dialog_response(self, dialog, task):
 
     def clear_conversation_dialog(self):
         if self.bot_message is not None:
-            self.show_toast("Conversation cannot be cleared while receiving a message")
+            self.show_toast("info", 1, self.main_overlay)
             return
         dialog = Adw.AlertDialog(
             heading=f"Clear Conversation",
@@ -338,6 +356,37 @@ def load_history(self):
                     self.show_message(message['content'], True, f"\n\n{message['model']}\t|\t{message['date']}")
                 self.bot_message = None
 
+    def closing_connection_dialog_response(self, dialog, task):
+        result = dialog.choose_finish(task)
+        if result == "cancel": return
+        if result == "save":
+            self.ollama_url = self.connection_url_entry.get_text()
+        self.connection_dialog.force_close()
+        if self.ollama_url is None or self.verify_connection() == False:
+            self.show_connection_dialog(True)
+            self.show_toast("error", 1, self.connection_overlay)
+
+
+    def closing_connection_dialog(self, dialog):
+        if self.get_visible() == False:
+            self.destroy()
+        else:
+            dialog = Adw.AlertDialog(
+                heading=f"Save Changes?",
+                body=f"Do you want to save the URL change?",
+                close_response="cancel"
+            )
+            dialog.add_response("cancel", "Cancel")
+            dialog.add_response("discard", "Discard")
+            dialog.add_response("save", "Save")
+            dialog.set_response_appearance("discard", Adw.ResponseAppearance.DESTRUCTIVE)
+            dialog.set_response_appearance("save", Adw.ResponseAppearance.SUGGESTED)
+            dialog.choose(
+                parent = self,
+                cancellable = None,
+                callback = self.closing_connection_dialog_response
+            )
+
     def __init__(self, **kwargs):
         super().__init__(**kwargs)
         self.manage_models_button.connect("clicked", self.manage_models_button_activate)
@@ -348,13 +397,14 @@ def __init__(self, **kwargs):
         self.connection_previous_button.connect("clicked", self.connection_previous_button_activate)
         self.connection_next_button.connect("clicked", self.connection_next_button_activate)
         self.connection_url_entry.connect("changed", lambda entry: entry.set_css_classes([]))
-        self.connection_dialog.connect("close-attempt", lambda dialog: self.destroy())
+        self.connection_dialog.connect("close-attempt", self.closing_connection_dialog)
         self.load_history()
         if os.path.exists(os.path.join(self.config_dir, "server.conf")):
             with open(os.path.join(self.config_dir, "server.conf"), "r") as f:
                 self.ollama_url = f.read()
-            if self.verify_connection() is False: self.show_connection_dialog()
+            if self.verify_connection() is False: self.show_connection_dialog(True)
         else: self.connection_dialog.present(self)
+        self.show_toast("funny", True, self.manage_models_overlay)
 
diff --git a/src/window.ui b/src/window.ui
index d0c9b7bc..f93addbd 100644
--- a/src/window.ui
+++ b/src/window.ui
@@ -5,7 +5,7 @@
 True
-
+
@@ -118,23 +118,28 @@
+    false
     400
-
+
-
-    true
-    true
-    24
-    24
-    24
-    24
-    Pulling Model
+
-
-    true
+
+    true
+    true
+    24
+    24
+    24
+    24
+    Pulling Model
+
+
+    true
+
+
@@ -142,39 +147,44 @@
+    true
     400
     600
-
-
-
-
-
-    Manage models
-
-
-
-
+
-
-    true
-    true
-    0
-    24
-    24
-    24
+
+
+
+
+
+    Manage models
+
+
+
+
-
+    true
     true
+    0
+    24
+    24
+    24
-
-    none
-
+
+    true
+    true
+
+
+    none
+
+
+
@@ -184,103 +194,108 @@
+    false
     450
     450
-
-
-
-
-
-
-
-
-    true
-    5
-    5
-    5
-
-
-    Previous
-    Previous
-    false
-
-
-
-
-
-    connection_carousel
+
+
+
+
+
-
-
-    Next
-    Next
-
+
+
+    true
+    5
+    5
+    5
+
+
+    Previous
+    Previous
+    false
+
+
+
+
+
+    connection_carousel
+
+
+
+
+    Next
+    Next
+
+
+
-
-
-
-
-    true
-    true
-    true
-    true
-    12
-
+    true
     true
-    12
-    12
-    12
-    12
-    com.jeffser.Alpaca
-    Welcome to Alpaca
-    To get started, please ensure you have an Ollama instance set up. You can either run Ollama locally on your machine or connect to a remote instance.
+    true
+    true
+    12
-
-    Ollama Website
-    https://ollama.com/
+
+    true
+    true
     12
+    12
+    12
+    12
+    com.jeffser.Alpaca
+    Welcome to Alpaca
+    To get started, please ensure you have an Ollama instance set up. You can either run Ollama locally on your machine or connect to a remote instance.
+
+
+    Ollama Website
+    https://ollama.com/
+    12
+
+
-
-
-
-
-    true
-    true
-    12
-    12
-    12
-    12
-    dialog-warning-symbolic
-    Disclaimer
-    Alpaca and its developers are not liable for any damages to devices or software resulting from the execution of code generated by an AI model. Please exercise caution and review the code carefully before running it.
-
-
-
-
-    true
-    true
-    12
-    12
-    12
-    12
-    preferences-other-symbolic
-    Setup
-    If you are running an Ollama instance locally and haven't modified the default ports, you can use the default URL. Otherwise, please enter the URL of your Ollama instance.
-
-    http://localhost:11434
+
+    true
+    true
+    12
+    12
+    12
+    12
+    dialog-warning-symbolic
+    Disclaimer
+    Alpaca and its developers are not liable for any damages to devices or software resulting from the execution of code generated by an AI model. Please exercise caution and review the code carefully before running it.
+
+
+
+
+    true
+    true
+    12
+    12
+    12
+    12
+    preferences-other-symbolic
+    Setup
+    If you are running an Ollama instance locally and haven't modified the default ports, you can use the default URL. Otherwise, please enter the URL of your Ollama instance.
+
+
+    http://localhost:11434
+
+
@@ -290,6 +305,7 @@
+
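Note: after this patch, error results from src/connection_handler.py carry only "status" and "status_code" (the human-readable "text" field is dropped), so callers choose the user-facing message themselves. The sketch below is a hypothetical caller written against that contract; list_models and the printed messages are made up for illustration, while the /api/tags endpoint and the "models"/"name" response shape come from update_list_local_models in the patch.

    # Hypothetical caller of the trimmed connection_handler contract:
    # a status_code of 0 means the request never got an HTTP response at all.
    import json
    import requests

    def simple_get(connection_url: str) -> dict:
        # Same return shape as src/connection_handler.py after this change (sketch).
        try:
            response = requests.get(connection_url)
            if response.status_code == 200:
                return {"status": "ok", "text": response.text, "status_code": response.status_code}
            return {"status": "error", "status_code": response.status_code}
        except Exception:
            return {"status": "error", "status_code": 0}

    def list_models(ollama_url: str) -> list:
        response = simple_get(ollama_url + "/api/tags")
        if response["status"] != "ok":
            # The caller, not the transport layer, decides what to tell the user.
            print("server unreachable" if response["status_code"] == 0 else f"HTTP {response['status_code']}")
            return []
        return [model["name"] for model in json.loads(response["text"])["models"]]

    print(list_models("http://localhost:11434"))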