Release 1.0.1 (#75)
* style(logo): final mvp version

* refactor: vector store files in separate directory

* up manifest

* fix(style): rag toggle icon covers text

* style(onboarding): improved ux

* up manifest
Leo310 authored Apr 5, 2024
1 parent 1c67929 commit 3d523f8
Showing 9 changed files with 57 additions and 31 deletions.
2 changes: 1 addition & 1 deletion manifest.json
@@ -4,7 +4,7 @@
"description": "Interact with your privacy focused assistant, leveraging Ollama or OpenAI, making your second brain even smarter.",
"author": "Leo310, nicobrauchtgit",
"authorUrl": "https://github.com/nicobrauchtgit",
"version": "1.0.0",
"version": "1.0.1",
"minAppVersion": "1.5.0",
"isDesktopOnly": true
}
6 changes: 3 additions & 3 deletions src/components/Chat/Chat.svelte
@@ -24,7 +24,7 @@
let isAutoScrolling = true;
let chatWindow: HTMLDivElement;
$: if (chatWindow && $papaState === 'running' && isAutoScrolling && $chatHistory) {
$: if (chatWindow && $papaState === 'running' && isAutoScrolling && $runContent) {
chatWindow.scrollTop = chatWindow.scrollHeight;
}
let contentNode: HTMLElement;
@@ -65,7 +65,7 @@
<div
bind:this={chatWindow}
on:scroll={() => (isAutoScrolling = chatWindow.scrollTop + chatWindow.clientHeight + 1 >= chatWindow.scrollHeight)}
class="chat-window w-full flex-grow select-text overflow-y-scroll rounded-md border border-solid border-[--background-modifier-border] bg-[--background-primary]"
class="chat-window w-full pb-8 flex-grow select-text overflow-y-scroll rounded-md border border-solid border-[--background-modifier-border] bg-[--background-primary]"
>
{#each $chatHistory as message (message.id)}
<MessageContainer role={message.role}>
@@ -148,5 +148,5 @@
{/if}
</div>
<InputComponent bind:textarea />
<span class="mb-3" />
<div class="mb-3" />
</div>
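
For context, the reactive block in Chat.svelte now keys auto-scroll on `$runContent` (presumably the store carrying the streamed response) instead of `$chatHistory`, so the view follows tokens as they arrive. A minimal sketch of the pattern under that assumption, with placeholder stores standing in for the plugin's own:

```svelte
<script lang="ts">
	import { writable } from 'svelte/store';

	// Placeholder stores; the plugin imports its own from src/store.
	const papaState = writable<'idle' | 'running'>('running');
	const runContent = writable('');

	let chatWindow: HTMLDivElement;
	let isAutoScrolling = true;

	// Re-runs on every streamed-content update while a run is active,
	// but only while the user is still pinned to the bottom.
	$: if (chatWindow && $papaState === 'running' && isAutoScrolling && $runContent) {
		chatWindow.scrollTop = chatWindow.scrollHeight;
	}
</script>

<div
	bind:this={chatWindow}
	on:scroll={() => (isAutoScrolling = chatWindow.scrollTop + chatWindow.clientHeight + 1 >= chatWindow.scrollHeight)}
>
	<!-- chat messages -->
</div>
```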
10 changes: 8 additions & 2 deletions src/components/Onboarding/OllamaApp.svelte
@@ -4,8 +4,14 @@
import { isOllamaRunning } from '../../controller/Ollama';
import OllamaSetup from './OllamaSetup.svelte';
import { t } from 'svelte-i18n';
import { afterUpdate } from 'svelte';
export let osType: string;
export let scrollToBottom = () => {};
afterUpdate(() => {
scrollToBottom();
});
let isRunning: boolean = false;
let isOllamaTested: boolean = false;
@@ -52,11 +58,11 @@
<li>
{$t('onboarding.ollama.app.restart')}<span aria-label={$t('onboarding.ollama.app.restart_label')} use:icon={'help-circle'} />
</li>
{:else}
{:else if osType === 'Windows_NT'}
<li>{$t('onboarding.ollama.app.quit')}<span aria-label={$t('onboarding.ollama.app.quit_label')} use:icon={'help-circle'} /></li>
<li>{$t('onboarding.ollama.app.start_origins')}</li>
<div class="w-max max-w-full text-xs *:flex *:rounded *:pr-1" use:renderMarkdown={(this, '```bash\n$env:OLLAMA_ORIGINS="*"; ollama serve\n```')} />
{/if}
<OllamaSetup />
<OllamaSetup {scrollToBottom} />
{/if}
</ol>
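
The unconditional `{:else}` branch in OllamaApp.svelte becomes `{:else if osType === 'Windows_NT'}`, so the PowerShell instructions only render on Windows. A minimal sketch of the gate, assuming `os.type()` reports the usual Node values ('Darwin', 'Windows_NT', 'Linux'):

```svelte
<script lang="ts">
	import * as os from 'os';

	// 'Darwin' on macOS, 'Windows_NT' on Windows, 'Linux' on Linux.
	const osType = os.type();
</script>

{#if osType === 'Darwin'}
	<!-- macOS: set OLLAMA_ORIGINS, then restart the menu-bar app -->
{:else if osType === 'Windows_NT'}
	<!-- Windows: quit the tray app and start `ollama serve` from PowerShell -->
{/if}
```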
3 changes: 2 additions & 1 deletion src/components/Onboarding/OllamaDaemon.svelte
@@ -8,6 +8,7 @@
import { t } from 'svelte-i18n';
export let osType: string;
export let scrollToBottom = () => {};
onMount(() => {
$data.isIncognitoMode = true;
@@ -34,5 +35,5 @@
</li>
<li>{$t('onboarding.ollama.deamon.start')}</li>
<div class="w-max max-w-full text-xs *:flex *:rounded *:pr-1" use:renderMarkdown={(this, '```bash\nOLLAMA_ORIGINS="*" ollama serve\n```')} />
<OllamaSetup />
<OllamaSetup {scrollToBottom} />
</ol>
14 changes: 10 additions & 4 deletions src/components/Onboarding/OllamaSetup.svelte
@@ -1,6 +1,6 @@
<script lang="ts">
import InitButtonComponent from './InitButton.svelte';
import { onMount } from 'svelte';
import { afterUpdate, onMount } from 'svelte';
import { getOllamaModels } from '../../controller/Ollama';
import { icon } from '../../controller/Messages';
import { plugin, data } from '../../store';
@@ -9,6 +9,12 @@
import { t } from 'svelte-i18n';
import PullOllamaModel from './PullOllamaModel.svelte';
export let scrollToBottom = () => {};
afterUpdate(() => {
scrollToBottom();
});
let ollamaModels: string[] = [];
let model: string = '';
let ollamaModelComponent: DropdownComponent;
@@ -51,14 +57,14 @@
{#if isOrigin}
<li>
{$t('onboarding.ollama.install_model')}<br />
<PullOllamaModel {pullModel} text="Recommended" onSuccessfulPull={async () => (ollamaModels = await getOllamaModels())} />
<PullOllamaModel {pullModel} text={$t('onboarding.ollama.recommended')} onSuccessfulPull={async () => (ollamaModels = await getOllamaModels())} />
</li>
{#if ollamaModels.length > 0}
<li>
<div class="flex flex-wrap items-center justify-between">
{$t('onboarding.ollama.set_model')}
<div class="flex items-center gap-1">
<button class="clickable-icon mr-1" use:icon={'refresh-ccw'} on:click={async () => (ollamaModels = await getOllamaModels())} />
<button class="clickable-icon" use:icon={'refresh-ccw'} on:click={async () => (ollamaModels = await getOllamaModels())} />
<DropdownComponent
bind:this={ollamaModelComponent}
selected={model}
@@ -70,7 +76,7 @@
</li>
{/if}
{#if model !== ''}
<div class="mt-4 w-full text-center">
<div class="my-4 w-full text-center">
<InitButtonComponent />
</div>
{/if}
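
`getOllamaModels` is imported from `src/controller/Ollama` and is not part of this diff. A hypothetical sketch of what such a helper can look like against Ollama's `GET /api/tags` endpoint, assuming the default base URL; the plugin's real implementation may differ:

```ts
// Hypothetical helper, for illustration only; the plugin's actual version
// lives in src/controller/Ollama and reads the base URL from its settings.
export async function getOllamaModels(baseUrl = 'http://localhost:11434'): Promise<string[]> {
	try {
		// Ollama lists locally installed models at GET /api/tags.
		const res = await fetch(`${baseUrl}/api/tags`);
		if (!res.ok) return [];
		const json: { models?: { name: string }[] } = await res.json();
		return (json.models ?? []).map((m) => m.name);
	} catch {
		return []; // Ollama not running, or origins not set (CORS)
	}
}
```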
21 changes: 13 additions & 8 deletions src/components/Onboarding/Onboarding.svelte
@@ -2,20 +2,25 @@
import { data } from '../../store';
import * as os from 'os';
import SliderComponent from '../base/Slider.svelte';
import AppComponent from './OllamaApp.svelte';
import OllamaAppComponent from './OllamaApp.svelte';
import OpenAiComponent from './OpenAI.svelte';
import DaemonComponent from './OllamaDaemon.svelte';
import OllamaDaemonComponent from './OllamaDaemon.svelte';
import IncognitoToggle from '../Settings/IncognitoToggle.svelte';
import { t } from 'svelte-i18n';
import Logo from '../Logos/Logo.svelte';
const osType = os.type();
const options = ['Ollama App', 'Ollama Daemon'];
let selected: 'Ollama App' | 'Ollama Daemon' = 'Ollama App';
let selected: 'Ollama App' | 'Ollama Daemon' = osType === 'Linux' ? 'Ollama Daemon' : 'Ollama App';
let onboardingWindow: HTMLDivElement;
function scrollToBottom() {
onboardingWindow.scrollTop = onboardingWindow.scrollHeight;
}
</script>

<div class="mx-auto flex h-full w-full max-w-[500px] flex-col items-center overflow-auto pt-8">
<div bind:this={onboardingWindow} class="mx-auto flex h-full w-full max-w-[500px] flex-col items-center overflow-auto p-8">
<div class="mb-2 h-40 w-40">
<Logo />
</div>
@@ -29,15 +34,15 @@
{#if osType === 'Darwin'}
<SliderComponent {options} bind:selected />
{/if}
{#if selected === 'Ollama App' || osType === 'Windows_NT'}
<AppComponent {osType} />
{#if selected === 'Ollama App'}
<OllamaAppComponent {osType} {scrollToBottom} />
{:else}
<DaemonComponent {osType} />
<OllamaDaemonComponent {osType} {scrollToBottom} />
{/if}
{:else}
<p class="px-10">
{$t('onboarding.openai_mode_note')}
</p>
<OpenAiComponent />
<OpenAiComponent {scrollToBottom} />
{/if}
</div>
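
Taken together with the child components above, the pattern here is a `scrollToBottom` callback defined next to the scroll container and passed down to each onboarding step, which calls it in `afterUpdate` so newly revealed instructions stay in view. A minimal sketch of the two halves, with a hypothetical `Step` child standing in for OllamaApp/OllamaDaemon/OpenAI:

```svelte
<!-- Parent (sketch): owns the scroll container and hands the callback down. -->
<script lang="ts">
	import Step from './Step.svelte'; // hypothetical child component

	let onboardingWindow: HTMLDivElement;
	function scrollToBottom() {
		onboardingWindow.scrollTop = onboardingWindow.scrollHeight;
	}
</script>

<div bind:this={onboardingWindow} class="h-full overflow-auto">
	<Step {scrollToBottom} />
</div>
```

```svelte
<!-- Child (sketch): calls the callback after every DOM update. -->
<script lang="ts">
	import { afterUpdate } from 'svelte';

	export let scrollToBottom: () => void = () => {};

	afterUpdate(() => {
		scrollToBottom();
	});
</script>
```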
11 changes: 10 additions & 1 deletion src/components/Onboarding/OpenAI.svelte
@@ -6,6 +6,13 @@
import { plugin, data } from '../../store';
import InitButtonComponent from './InitButton.svelte';
import { t } from 'svelte-i18n';
import { afterUpdate } from 'svelte';
export let scrollToBottom = () => {};
afterUpdate(() => {
scrollToBottom();
});
let openAIApiKey: string = $data.openAIGenModel.openAIApiKey;
let isValid: boolean = false;
@@ -62,5 +69,7 @@
</li>
</ol>
{#if isValid}
<InitButtonComponent />
<div class="w-full text-center">
<InitButtonComponent />
</div>
{/if}
1 change: 0 additions & 1 deletion src/components/Onboarding/PullOllamaModel.svelte
@@ -18,7 +18,6 @@
let progress: number = 0;
let status: string = '';
let isPullingError = false;
console.log('pullModel', pullModel);
async function pullOllamaModelStream() {
isPullingModel = true;
20 changes: 10 additions & 10 deletions src/lang/en.json
@@ -73,35 +73,35 @@
"input_placeholder": "Chat with your Smart Second Brain..."
},
"onboarding": {
"welcome_msg": "Welcome to your Smart Second Brain! Your personal AI assistant that helps you to find information in your notes.",
"welcome_msg": "Welcome to your Smart Second Brain! Your personal AI assistant that helps you to find and process information in your notes.",
"setup": "Setup",
"test": "Test",
"privacy_mode_note": "Your assistant is running in privacy mode. That means it is not connected to the internet and is running fully locally by leveraging Ollama.",
"openai_mode_note": "Your assistant is using third party services to run. That means you will have to share all your personal information with these services and your Smart Second Brain needs to be connected to the internet to leverage OpenAIs large language models like ChatGPT.",
"privacy_mode_note": "Your assistant will run in privacy mode. That means it is not connected to the internet and is running fully locally by leveraging Ollama.",
"openai_mode_note": "Your assistant will use third-party services to run. That means you will have to share all your personal information with these services and your Smart Second Brain needs to be connected to the internet to leverage OpenAIs large language models like ChatGPT.",
"init": "Start your Smart Second Brain",
"init_label": "Click to Start",
"ollama": {
"deamon": {
"install": "Install Ollama",
"install": "Install Ollama in the terminal:",
"set_baseurl": "Set the Ollama Base URL",
"start": "Start the Ollama with origins"
"start": "Start Ollama by running this command in the terminal"
},
"app": {
"download": "Download the App",
"download_link": "here",
"extract": "Extract the .zip and start Ollama",
"run": "Run the setup.exe",
"test_label": "Test if Ollama is running",
"set_origins": "Set Ollama origins to enable streaming responses",
"set_origins": "In the terminal set Ollama origins to enable streaming responses:",
"restart": "Restart the Ollama service ",
"restart_label": "Click menu bar icon and then quit",
"restart_label": "Click Ollamas menu bar icon and then quit",
"quit": "Quit the Ollama service ",
"quit_label": "Click menu bar icon and then quit",
"start_origins": "Start the Ollama service with origins"
"quit_label": "Click Ollamas menu bar icon and then quit",
"start_origins": "Start Ollama by running this command in the powershell"
},
"test_origins": "Test if the origins are set correctly",
"install_model": "Install an Ollama Embedding Model.",
"recommended_models": "Recommended:",
"recommended": "Recommended: ",
"set_model": "Set your Embedding Model:"
},
"openai": {
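
The `recommended_models` key is renamed to `recommended` and is now consumed in OllamaSetup.svelte via `$t('onboarding.ollama.recommended')`. A minimal sketch of how such a key resolves with svelte-i18n, assuming the dictionary is registered roughly like this (the plugin's actual i18n bootstrap and import paths are not part of this diff):

```svelte
<script lang="ts">
	import { addMessages, init, t } from 'svelte-i18n';
	import en from '../../lang/en.json'; // illustrative path

	// Register the English dictionary and pick it as the active locale.
	addMessages('en', en);
	init({ fallbackLocale: 'en', initialLocale: 'en' });
</script>

<!-- Renders "Recommended: " from onboarding.ollama.recommended -->
<span>{$t('onboarding.ollama.recommended')}</span>
```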
