feat: Added built-in model, quick start btn, template and debug insights
BrutalCoding committed Sep 17, 2023
1 parent 4bda1cd commit a1ddc13
Showing 5 changed files with 113 additions and 22 deletions.
Binary file added shady_ai_flutter/assets/shady.gguf
Binary file not shown.
51 changes: 50 additions & 1 deletion shady_ai_flutter/lib/data/prompt_template.dart
@@ -22,12 +22,33 @@ class PromptTemplate with _$PromptTemplate {

// Returns a List<PromptTemplate> of all the available prompt templates.
static List<PromptTemplate> get all => [
defaultPromptTemplate(),
PromptTemplate.llama2Chat(),
PromptTemplate.synthia(),
PromptTemplate.chat(),
PromptTemplate.nothing(),
];

static PromptTemplate defaultPromptTemplate() {
return all.first;
return PromptTemplate.story();
}

factory PromptTemplate.story() {
return PromptTemplate(
label: 'Story',
promptTemplate: '{prompt}',
prompt: "One day, a boy named Luca went for a walk.",
postProcess: (output) => output,
);
}

factory PromptTemplate.nothing() {
return PromptTemplate(
label: 'Nothing',
promptTemplate: '{prompt}',
prompt: '',
postProcess: (output) => output,
);
}

factory PromptTemplate.synthia() {
@@ -60,6 +81,34 @@ class PromptTemplate with _$PromptTemplate {
promptTemplate:
"[INST] <<SYS>>\nYou are a helpful, respectful and honest assistant. Always answer as helpfully as possible, while being safe. Your answers should not include any harmful, unethical, racist, sexist, toxic, dangerous, or illegal content. Please ensure that your responses are socially unbiased and positive in nature. If a question does not make any sense, or is not factually coherent, explain why instead of answering something not correct. If you don't know the answer to a question, please don't share false information.\n<</SYS>>\n{prompt}[/INST]",
prompt: 'Write a story about llamas',
postProcess: (output) {
// Remove the echoed system block, i.e. everything from '<<SYS>>' up to and
// including '<</SYS>>', so only the assistant's response is returned.
final sysIndex = output.indexOf('<<SYS>>');

if (sysIndex != -1) {
final endSysIndex = output.indexOf('<</SYS>>', sysIndex);
if (endSysIndex != -1) {
// '<</SYS>>' is 8 characters long; cut just past its end.
return output.replaceRange(
sysIndex,
endSysIndex + '<</SYS>>'.length,
'',
);
}
}

return output;
},
);
}

factory PromptTemplate.chat() {
return PromptTemplate(
label: 'Chat',
promptTemplate: "USER: {prompt}\nASSISTANT:",
prompt: 'Write a story about llamas',
postProcess: (output) {
// Extract the substring from "ASSISTANT:" up to the first period (.).
// Start by finding the index of the ':' that follows 'ASSISTANT'.
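The PromptTemplate pieces above fit together roughly as follows: the {prompt} placeholder in promptTemplate is filled with the user's prompt, the model runs on the assembled string, and postProcess cleans up the raw output. A minimal usage sketch, not part of this commit: runInference is a hypothetical stand-in for the inference call shown in the next file, and postProcess is assumed non-nullable, as the factories above suggest.

Future<String> completeWith(
  PromptTemplate template,
  String userPrompt,
  Future<String> Function(String prompt) runInference,
) async {
  // Fill the template's placeholder with the user's prompt.
  final fullPrompt =
      template.promptTemplate.replaceAll('{prompt}', userPrompt);

  // Run the model on the assembled prompt (hypothetical callback).
  final rawOutput = await runInference(fullPrompt);

  // Let the template strip any echoed scaffolding from the raw output.
  return template.postProcess(rawOutput);
}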
@@ -255,8 +255,24 @@ class InstructionInference extends _$InstructionInference {
}
}

final output = tokensDecodedIntoPieces.join('');

return outputPostProcess?.call(output) ?? output;
// Debug output: print the parameters set by the user and those reported by the model (llama.cpp).
print('=======\nParameters received from user=======');
print('pathToFile: $pathToFile');
print('promptTemplate: ${promptTemplate.toJson()}');

print('=======\nParameters received from model (llama.cpp)=======');
print('n_ctx: ${lparams.n_ctx}');
print('n_gpu_layers: ${lparams.n_gpu_layers}');
print('n_batch: ${lparams.n_batch}');
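// NOTE: the interpolation below prints the Dart tear-off of
// llama_print_system_info rather than calling it; converting the returned
// C string into a Dart String is sketched after this file's diff.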
print('system_info: ${llama_cpp.llama_print_system_info}');

print('=======\nAI responded with (raw): $tokensDecodedIntoPieces=======');
final outputPre = tokensDecodedIntoPieces.join('');
print(
'=======\nAI responded with (pre-post-processing): $outputPre=======',
);
final outputPost = outputPostProcess?.call(outputPre) ?? outputPre;
print('=======\nAI responded with (post-processed): $outputPost=======');
return outputPost;
}
}

Some generated files are not rendered by default.
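The system_info debug print above interpolates the FFI function itself. A hedged sketch of converting the returned C string to a Dart String, assuming an ffigen-style binding that returns Pointer<Char> and that package:ffi is available (not verified against this repo's generated bindings):

import 'dart:ffi' as ffi;

import 'package:ffi/ffi.dart';

/// Converts the `const char*` returned by llama_print_system_info into a
/// Dart String.
String systemInfoString(ffi.Pointer<ffi.Char> cString) {
  return cString.cast<Utf8>().toDartString();
}

// Hypothetical usage, mirroring the debug print above:
// print('system_info: ${systemInfoString(llama_cpp.llama_print_system_info())}');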

60 changes: 43 additions & 17 deletions shady_ai_flutter/lib/main.dart
@@ -80,17 +80,45 @@ class QuickStartPage extends HookConsumerWidget {
final textTheme = Theme.of(context).textTheme;
final filePath = useState<String>('');
final promptTemplate = useState<PromptTemplate>(
PromptTemplate.llama2Chat(),
PromptTemplate.defaultPromptTemplate(),
);
final textControllerPromptSystem = useTextEditingController()
..text = promptTemplate.value.systemMessage;
final textController = useTextEditingController();
final textController = useTextEditingController.fromValue(
TextEditingValue(
text: promptTemplate.value.prompt,
),
);
final stepperIndex = useState<int>(0);

// If the template changes, update the text controller with the default prompt
useEffect(
() {
textController.text = promptTemplate.value.prompt;
return null;
},
[promptTemplate.value.promptTemplate],
);

return Scaffold(
appBar: AppBar(
title: Text("ShadyAI"),
centerTitle: true,
actions: [
// IconButton to reset the file path, template, and stepper state
IconButton(
icon: const Icon(Icons.refresh),
onPressed: () {
filePath.value = '';
promptTemplate.value = PromptTemplate.defaultPromptTemplate();
textControllerPromptSystem.text =
promptTemplate.value.systemMessage;
textController.text = promptTemplate.value.prompt;
stepperIndex.value = 0;
},
),

// IconButton to show the info dialog
IconButton(
icon: const Icon(Icons.info_outline),
onPressed: () {
@@ -281,6 +309,19 @@ class QuickStartPage extends HookConsumerWidget {
const SizedBox(
height: 16,
),
// TextButton to start with built-in model
if (filePath.value.isEmpty)
TextButton(
onPressed: () {
filePath.value = 'assets/shady.gguf';
},
child: const Text(
'I want to try a built-in model',
),
),
const SizedBox(
height: 16,
),
Builder(
builder: (context) {
if (filePath.value.isEmpty) {
@@ -471,21 +512,6 @@ class QuickStartPage extends HookConsumerWidget {
],
),
),
const SizedBox(
height: 16,
),
TextButton(
child: const Text(
'I want to try another model',
),
onPressed: () {
filePath.value = '';
stepperIndex.value = 0;
promptTemplate.value =
PromptTemplate.defaultPromptTemplate();
textController.clear();
},
),
],
);
},
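One follow-up consideration for the new built-in model button above: filePath.value is set to the Flutter asset key 'assets/shady.gguf', but a native loader such as llama.cpp generally needs a real path on disk. A hedged sketch, not part of this commit, of copying the bundled asset to a temporary file first; it assumes path_provider is (or would be) a dependency.

import 'dart:io';

import 'package:flutter/services.dart' show rootBundle;
import 'package:path_provider/path_provider.dart';

/// Copies the bundled GGUF asset to a temporary file and returns its path,
/// so native code can open it like any other file.
Future<String> materializeBundledModel() async {
  final data = await rootBundle.load('assets/shady.gguf');
  final dir = await getTemporaryDirectory();
  final file = File('${dir.path}/shady.gguf');
  await file.writeAsBytes(
    data.buffer.asUint8List(data.offsetInBytes, data.lengthInBytes),
    flush: true,
  );
  return file.path;
}

// Hypothetical usage in the onPressed handler shown above
// (the handler would need to be async):
// filePath.value = await materializeBundledModel();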
