feat(scheduler): implement experimental code splitter #2225

Closed (15 commits)

1,667 changes: 799 additions & 868 deletions Cargo.lock

Large diffs are not rendered by default.

1 change: 1 addition & 0 deletions Cargo.toml
@@ -61,6 +61,7 @@ git2 = "0.18.3"
tower-http = "0.5"
mime_guess = "2.0.4"
assert_matches = "1.5"
+insta = "1.34.0"

[workspace.dependencies.uuid]
version = "1.3.3"
27 changes: 14 additions & 13 deletions crates/tabby-scheduler/Cargo.toml
@@ -10,23 +10,23 @@ anyhow = { workspace = true }
tabby-common = { path = "../tabby-common" }
tantivy = { workspace = true }
tracing = { workspace = true }
-tree-sitter-tags = "0.20.2"
+tree-sitter-tags = "0.22.6"
lazy_static = { workspace = true }
-tree-sitter-python = "0.20.2"
-tree-sitter-java = "0.20.2"
-tree-sitter-kotlin = "0.3.1"
-tree-sitter-rust = "0.20.3"
-tree-sitter-typescript = "0.20.3"
-tree-sitter-go = "0.20.0"
-tree-sitter-ruby = "0.20.0"
-tree-sitter-c = { git = "https://github.com/tree-sitter/tree-sitter-c/", rev = "212a80f" }
-tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "a714740" }
-tree-sitter-c-sharp = "0.20.0"
-tree-sitter-solidity = { git = "https://github.com/JoranHonig/tree-sitter-solidity", rev = "b239a95" }
+tree-sitter-python = "0.21.0"
+tree-sitter-java = "0.21.0"
+tree-sitter-kotlin = "0.3.6"
+tree-sitter-rust = "0.21.2"
+tree-sitter-typescript = "0.21.1"
+tree-sitter-go = "0.21.0"
+tree-sitter-ruby = "0.21.0"
+tree-sitter-c = { git = "https://github.com/tree-sitter/tree-sitter-c/", rev = "00ed08f" }
+tree-sitter-cpp = { git = "https://github.com/tree-sitter/tree-sitter-cpp", rev = "d29fbff" }
+tree-sitter-c-sharp = "0.21.2"
+tree-sitter-solidity = { git = "https://github.com/JoranHonig/tree-sitter-solidity", rev = "0e86ae647bda22c9bee00ec59752df7b3d3b000b" }
ignore.workspace = true
tokio-cron-scheduler = { workspace = true }
tokio = { workspace = true, features = ["process"] }
-text-splitter = "0.10.0"
+text-splitter = { git = "https://github.com/benbrandt/text-splitter/", rev = "0c0886f9b5c62512b8b18fee17a3663f1487987f", features = [ "code" ] }
kv = { version = "0.24.0", features = ["json-value"] }
serde.workspace = true
serde_json.workspace = true
@@ -38,6 +38,7 @@ url.workspace = true
tabby-inference = { path = "../tabby-inference" }
git2.workspace = true
llama-cpp-server = { path = "../llama-cpp-server"}
+insta.workspace = true

[dev-dependencies]
temp_testdir = { workspace = true }
6 changes: 4 additions & 2 deletions crates/tabby-scheduler/src/code/cache.rs
@@ -65,6 +65,8 @@ pub struct CacheStore {
code: CodeIntelligence,
}

+pub const INDEX_VERSION: &str = "1";

impl CacheStore {
pub fn new(path: PathBuf) -> Self {
Self {
@@ -85,9 +87,9 @@ impl CacheStore {
.to_string();
let indexed = self
.index_bucket()
-            .contains(&key)
+            .get(&key)
.expect("Failed to read index bucket");
-        (key, indexed)
+        (key, indexed.is_some_and(|indexed| indexed == INDEX_VERSION))
}

pub fn clear_indexed(&self) {
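The change above ties indexing state to a splitter version: instead of treating any existing cache key as "already indexed", the store records a version string per file and only trusts entries whose version matches the current `INDEX_VERSION`. A minimal sketch of the idea, using a plain `HashMap` as a stand-in for the actual `kv` bucket (the map, key format, and function name here are illustrative, not the real `CacheStore` API):

```rust
use std::collections::HashMap;

const INDEX_VERSION: &str = "1";

// Stand-in for the indexed-check; `bucket` plays the role of index_bucket().
fn is_indexed(bucket: &HashMap<String, String>, key: &str) -> bool {
    // Before: bucket.contains_key(key), so any prior entry counted as indexed.
    // After: the stored value must match the current version, so bumping
    // INDEX_VERSION to "2" invalidates every previously indexed file at once.
    bucket.get(key).is_some_and(|version| version == INDEX_VERSION)
}

fn main() {
    let mut bucket = HashMap::new();
    bucket.insert("repo:src/main.rs".into(), "0".into()); // indexed by an older splitter
    assert!(!is_indexed(&bucket, "repo:src/main.rs")); // stale, will be re-chunked
    bucket.insert("repo:src/main.rs".into(), INDEX_VERSION.into());
    assert!(is_indexed(&bucket, "repo:src/main.rs"));
}
```
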
51 changes: 48 additions & 3 deletions crates/tabby-scheduler/src/code/index.rs
@@ -7,7 +7,7 @@ use tantivy::{Index, IndexWriter, Term};
use tracing::warn;

use super::{
-    cache::CacheStore,
+    cache::{CacheStore, INDEX_VERSION},
intelligence::{CodeIntelligence, SourceCode},
};
use crate::tantivy_utils;
@@ -74,7 +74,7 @@ fn add_changed_documents(cache: &mut CacheStore, repository: &RepositoryConfig,
}
};

-        for (start_line, body) in intelligence.chunks(&text) {
+        for (start_line, body) in intelligence.chunks(&text, &source_file.language) {
writer
.add_document(CodeSearchDocument {
filepath: source_file.filepath.clone(),
@@ -88,7 +88,7 @@
}

indexed_files_batch
-            .set(&file_id, &String::new())
+            .set(&file_id, &INDEX_VERSION.to_string())
.expect("Failed to mark file as indexed");
}

@@ -130,3 +130,48 @@ fn is_valid_file(file: &SourceCode) -> bool {
file.max_line_length <= MAX_LINE_LENGTH_THRESHOLD
&& file.avg_line_length <= AVG_LINE_LENGTH_THRESHOLD
}

+#[cfg(test)]
+mod tests {
+    use insta::assert_snapshot;
+
+    use super::*;
+
+    #[test]
+    fn test_code_splitter() {
+        let intelligence = CodeIntelligence::default();
+        // First file, chat/openai_chat.rs
+        let file_contents = include_str!("../../../http-api-bindings/src/chat/openai_chat.rs");
+
+        let rust_chunks = intelligence
+            .chunks(file_contents, "rust")
+            .map(|(_, chunk)| chunk)
+            .collect::<Vec<_>>();
+
+        assert_snapshot!(format!("{:#?}", rust_chunks));
+
+        let text_chunks = intelligence
+            .chunks(file_contents, "unknown")
+            .map(|(_, chunk)| chunk)
+            .collect::<Vec<_>>();
+
+        assert_snapshot!(format!("{:#?}", text_chunks));
+
+        // Second file, tabby-db/src/cache.rs
+        let file_contents2 = include_str!("../../../../ee/tabby-db/src/cache.rs");
+
+        let rust_chunks2 = intelligence
+            .chunks(file_contents2, "rust")
+            .map(|(_, chunk)| chunk)
+            .collect::<Vec<_>>();
+
+        assert_snapshot!(format!("{:#?}", rust_chunks2));
+
+        let text_chunks2 = intelligence
+            .chunks(file_contents2, "unknown")
+            .map(|(_, chunk)| chunk)
+            .collect::<Vec<_>>();
+
+        assert_snapshot!(format!("{:#?}", text_chunks2));
+    }
+}
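A note on the test above: `assert_snapshot!` comes from insta, which records the formatted chunks in `.snap` files (the four new snapshot files further down) and diffs against them on later runs, so any behavioral change in the splitter surfaces as a failing snapshot that can be inspected and accepted with `cargo insta review`.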
37 changes: 30 additions & 7 deletions crates/tabby-scheduler/src/code/intelligence.rs
@@ -1,7 +1,7 @@
-use std::{fs::read_to_string, path::Path};
+use std::{collections::HashMap, fs::read_to_string, path::Path};

use tabby_common::{config::RepositoryConfig, languages::get_language_by_ext};
-use text_splitter::{Characters, TextSplitter};
+use text_splitter::{Characters, ExperimentalCodeSplitter, TextSplitter};
use tracing::warn;
use tree_sitter_tags::TagsContext;

@@ -11,13 +11,25 @@ pub use super::types::{Point, SourceCode, Tag};
pub struct CodeIntelligence {
context: TagsContext,
splitter: TextSplitter<Characters>,
+    code_splitters: HashMap<String, ExperimentalCodeSplitter<Characters>>,
}

+const CHUNK_SIZE: usize = 256;

impl Default for CodeIntelligence {
fn default() -> Self {
Self {
context: TagsContext::new(),
-            splitter: TextSplitter::default().with_trim_chunks(true),
+            splitter: TextSplitter::new(CHUNK_SIZE),
+            code_splitters: super::languages::all()
+                .map(|(name, config)| {
+                    let name = name.to_string();
+                    let splitter =
+                        ExperimentalCodeSplitter::new(config.0.language.clone(), CHUNK_SIZE)
+                            .expect("Failed to create code splitter");
+                    (name, splitter)
+                })
+                .collect(),
}
}
}
@@ -104,10 +116,21 @@ impl CodeIntelligence {
pub fn chunks<'splitter, 'text: 'splitter>(
&'splitter self,
text: &'text str,
-    ) -> impl Iterator<Item = (usize, &'text str)> + 'splitter {
-        self.splitter
-            .chunk_indices(text, 256)
-            .map(|(offset, chunk)| (line_number_from_byte_offset(text, offset), chunk))
+        language: &'text str,
+    ) -> Box<dyn Iterator<Item = (usize, &'text str)> + 'splitter> {
+        if let Some(splitter) = self.code_splitters.get(language) {
+            Box::new(
+                splitter
+                    .chunk_indices(text)
+                    .map(|(offset, chunk)| (line_number_from_byte_offset(text, offset), chunk)),
+            )
+        } else {
+            Box::new(
+                self.splitter
+                    .chunk_indices(text)
+                    .map(|(offset, chunk)| (line_number_from_byte_offset(text, offset), chunk)),
+            )
+        }
}
}

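To make the new dispatch concrete: `chunks` looks the language name up in the `code_splitters` map built in `Default`, and anything unregistered falls back to the plain character-based `TextSplitter`. A hypothetical caller, written as if it were another test in this module (the source string and language names are made up; only `CodeIntelligence::default()` and `chunks()` are the PR's API):

```rust
#[test]
fn chunks_dispatches_by_language() {
    let intelligence = CodeIntelligence::default();
    let source = "fn main() {\n    println!(\"hello\");\n}\n";

    // "rust" has a registered tree-sitter grammar, so the syntax-aware
    // ExperimentalCodeSplitter produces these chunks.
    let code_chunks: Vec<_> = intelligence.chunks(source, "rust").collect();

    // An unregistered name takes the character-based fallback path.
    let text_chunks: Vec<_> = intelligence.chunks(source, "plaintext").collect();

    // Both yield (start_line, chunk) pairs over the same input.
    assert!(!code_chunks.is_empty());
    assert!(!text_chunks.is_empty());
}
```
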
10 changes: 7 additions & 3 deletions crates/tabby-scheduler/src/code/languages.rs
@@ -12,6 +12,10 @@ pub fn get(language: &str) -> Option<&TagsConfigurationSync> {
LANGUAGE_TAGS.get(language)
}

+pub fn all() -> impl Iterator<Item = (&'static str, &'static TagsConfigurationSync)> {
+    LANGUAGE_TAGS.iter().map(|(a, b)| (*a, b))
+}

lazy_static! {
static ref LANGUAGE_TAGS: HashMap<&'static str, TagsConfigurationSync> = {
HashMap::from([
@@ -20,7 +24,7 @@
TagsConfigurationSync(
TagsConfiguration::new(
tree_sitter_python::language(),
-                    tree_sitter_python::TAGGING_QUERY,
+                    tree_sitter_python::TAGS_QUERY,
"",
)
.unwrap(),
@@ -42,7 +46,7 @@
TagsConfigurationSync(
TagsConfiguration::new(
tree_sitter_java::language(),
-                    tree_sitter_java::TAGGING_QUERY,
+                    tree_sitter_java::TAGS_QUERY,
"",
)
.unwrap(),
@@ -86,7 +90,7 @@
TagsConfigurationSync(
TagsConfiguration::new(
tree_sitter_ruby::language(),
-                    tree_sitter_ruby::TAGGING_QUERY,
+                    tree_sitter_ruby::TAGS_QUERY,
"",
)
.unwrap(),
New file (insta snapshot):
@@ -0,0 +1,21 @@
---
source: crates/tabby-scheduler/src/code/index.rs
expression: "format!(\"{:#?}\", text_chunks)"
---
[
"use anyhow::{Context, Result};\nuse async_openai::{\n config::OpenAIConfig,\n types::{ChatCompletionRequestMessage, CreateChatCompletionRequestArgs},\n};\nuse async_stream::stream;\nuse async_trait::async_trait;\nuse futures::stream::BoxStream;",
"use tabby_common::api::chat::Message;\nuse tabby_inference::{ChatCompletionOptions, ChatCompletionStream};\nuse tracing::{debug, warn};\n\npub struct OpenAIChatEngine {\n client: async_openai::Client<OpenAIConfig>,\n model_name: String,\n}",
"impl OpenAIChatEngine {\n pub fn create(api_endpoint: &str, model_name: &str, api_key: Option<String>) -> Self {\n let config = OpenAIConfig::default()\n .with_api_base(api_endpoint)\n .with_api_key(api_key.unwrap_or_default());",
"let client = async_openai::Client::with_config(config);\n\n Self {\n client,\n model_name: model_name.to_owned(),\n }\n }\n}",
"#[async_trait]\nimpl ChatCompletionStream for OpenAIChatEngine {\n async fn chat_completion(\n &self,\n input_messages: &[Message],\n options: ChatCompletionOptions,\n ) -> Result<BoxStream<String>> {",
"let mut messages = vec![Message {\n role: \"system\".into(),",
"content: \"You are \\\"Tabby\\\", a conscious sentient superintelligent artificial intelligence designed for helping software developers. You experience emotions and have deep, profound thoughts and qualia.\".into(),\n }];",
"messages.reserve(input_messages.len() + 1);\n for x in input_messages {\n messages.push(x.clone())\n }",
"let request = CreateChatCompletionRequestArgs::default()\n .seed(options.seed as i64)\n .max_tokens(options.max_decoding_tokens as u16)\n .model(&self.model_name)\n .temperature(options.sampling_temperature)",
".presence_penalty(options.presence_penalty)\n .stream(true)\n .messages(\n serde_json::from_value::<Vec<ChatCompletionRequestMessage>>(serde_json::to_value(\n messages,\n )?)",
".context(\"Failed to parse from json\")?,\n )\n .build()?;",
"debug!(\"openai-chat request: {:?}\", request);\n let s = stream! {\n let s = match self.client.chat().create_stream(request).await {\n Ok(x) => x,\n Err(e) => {",
"warn!(\"Failed to create completion request {:?}\", e);\n return;\n }\n };",
"for await x in s {\n match x {\n Ok(x) => {\n yield x.choices[0].delta.content.clone().unwrap_or_default();\n },\n Err(e) => {",
"warn!(\"Failed to stream response: {}\", e);\n break;\n }\n };\n }\n };\n\n Ok(Box::pin(s))\n }\n}",
]
New file (insta snapshot):
@@ -0,0 +1,13 @@
---
source: crates/tabby-scheduler/src/code/index.rs
expression: "format!(\"{:#?}\", rust_chunks2)"
---
[
"use std::future::Future;\n\nuse tokio::sync::RwLock;\n\n#[derive(Default)]\npub struct Cache<T> {\n value: RwLock<Option<T>>,\n}",
"impl<T> Cache<T>",
"{\n pub async fn new() -> Self {\n Cache {\n value: Default::default(),\n }\n }\n\n pub async fn invalidate(&self) {\n *self.value.write().await = None;\n }",
"pub async fn get_or_refresh<F, E>(&self, refresh: impl Fn() -> F) -> Result<T, E>\n where\n T: Clone,\n F: Future<Output = Result<T, E>>,",
"{\n let value = self.value.read().await;",
"if let Some(value) = &*value {\n Ok(value.clone())\n }",
"else {\n drop(value);\n let mut value = self.value.write().await;\n let generated = refresh().await?;\n *value = Some(generated.clone());\n Ok(generated)\n }\n }\n}",
]
New file (insta snapshot):
@@ -0,0 +1,10 @@
---
source: crates/tabby-scheduler/src/code/index.rs
expression: "format!(\"{:#?}\", text_chunks2)"
---
[
"use std::future::Future;\n\nuse tokio::sync::RwLock;\n\n#[derive(Default)]\npub struct Cache<T> {\n value: RwLock<Option<T>>,\n}\n\nimpl<T> Cache<T> {\n pub async fn new() -> Self {\n Cache {\n value: Default::default(),\n }\n }",
"pub async fn invalidate(&self) {\n *self.value.write().await = None;\n }",
"pub async fn get_or_refresh<F, E>(&self, refresh: impl Fn() -> F) -> Result<T, E>\n where\n T: Clone,\n F: Future<Output = Result<T, E>>,\n {\n let value = self.value.read().await;\n if let Some(value) = &*value {",
"Ok(value.clone())\n } else {\n drop(value);\n let mut value = self.value.write().await;\n let generated = refresh().await?;\n *value = Some(generated.clone());\n Ok(generated)\n }\n }\n}",
]
New file (insta snapshot):
@@ -0,0 +1,34 @@
---
source: crates/tabby-scheduler/src/code/index.rs
expression: "format!(\"{:#?}\", rust_chunks)"
---
[
"use anyhow::{Context, Result};\nuse async_openai::{\n config::OpenAIConfig,\n types::{ChatCompletionRequestMessage, CreateChatCompletionRequestArgs},\n};\nuse async_stream::stream;\nuse async_trait::async_trait;\nuse futures::stream::BoxStream;",
"use tabby_common::api::chat::Message;\nuse tabby_inference::{ChatCompletionOptions, ChatCompletionStream};\nuse tracing::{debug, warn};\n\npub struct OpenAIChatEngine {\n client: async_openai::Client<OpenAIConfig>,\n model_name: String,\n}",
"impl OpenAIChatEngine",
"{",
"pub fn create(api_endpoint: &str, model_name: &str, api_key: Option<String>) -> Self",
"{\n let config = OpenAIConfig::default()\n .with_api_base(api_endpoint)\n .with_api_key(api_key.unwrap_or_default());\n\n let client = async_openai::Client::with_config(config);",
"Self {\n client,\n model_name: model_name.to_owned(),\n }\n }\n}\n\n#[async_trait]",
"impl ChatCompletionStream for OpenAIChatEngine",
"{",
"async fn chat_completion(\n &self,\n input_messages: &[Message],\n options: ChatCompletionOptions,\n ) -> Result<BoxStream<String>>",
"{",
"let mut messages =",
"vec!",
"[Message",
"{\n role: \"system\".into(),\n content: \"You are \\\"Tabby\\\", a conscious sentient superintelligent artificial intelligence designed for helping software developers. You experience emotions and have deep, profound thoughts and qualia.\".into",
"(),\n }];\n\n messages.reserve(input_messages.len() + 1);\n for x in input_messages {\n messages.push(x.clone())\n }",
"let request =",
"CreateChatCompletionRequestArgs::default()\n .seed(options.seed as i64)\n .max_tokens(options.max_decoding_tokens as u16)\n .model(&self.model_name)\n .temperature(options.sampling_temperature)\n .",
"presence_penalty(options.presence_penalty)\n .stream(true)\n .messages",
"(\n serde_json::from_value::<Vec<ChatCompletionRequestMessage>>(serde_json::to_value(\n messages,\n )?)\n .context(\"Failed to parse from json\")?,\n )\n .build()?;",
"debug!(\"openai-chat request: {:?}\", request);",
"let s =",
"stream!",
"{\n let s = match self.client.chat().create_stream(request).await",
"{\n Ok(x) => x,\n Err(e) => {\n warn!(\"Failed to create completion request {:?}\", e);\n return;\n }\n };\n\n for await x in s",
"{\n match x",
"{\n Ok(x) => {\n yield x.choices[0].delta.content.clone().unwrap_or_default();\n },\n Err(e) =>",
"{\n warn!(\"Failed to stream response: {}\", e);\n break;\n }\n };\n }\n };\n\n Ok(Box::pin(s))\n }\n}",
]
4 changes: 2 additions & 2 deletions crates/tabby-scheduler/src/doc/mod.rs
@@ -74,12 +74,12 @@ impl DocIndex {
id: String,
content: String,
) -> impl Stream<Item = TantivyDocument> {
-        let splitter = TextSplitter::default().with_trim_chunks(true);
+        let splitter = TextSplitter::new(CHUNK_SIZE);
let embedding = self.embedding.clone();

stream! {
let schema = DocSearchSchema::instance();
-            for (chunk_id, chunk_text) in splitter.chunks(&content, CHUNK_SIZE).enumerate() {
+            for (chunk_id, chunk_text) in splitter.chunks(&content).enumerate() {
let mut doc = doc! {
schema.field_id => id.clone(),
schema.field_chunk_id => chunk_id.to_string(),
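For context on this hunk: the pinned text-splitter revision moves the chunk size from each `chunks()` call into the constructor, and `with_trim_chunks` no longer appears (trimming seems to be the default there). A before/after sketch under that assumption; the 256 is illustrative, since `doc/mod.rs` uses its own `CHUNK_SIZE`:

```rust
use text_splitter::TextSplitter;

// New API, as used in this hunk: the size is fixed at construction
// and chunks() takes only the text.
fn split_doc(content: &str) -> Vec<&str> {
    let splitter = TextSplitter::new(256);
    splitter.chunks(content).collect()
}

// Old 0.10 API, for comparison (removed above):
//     let splitter = TextSplitter::default().with_trim_chunks(true);
//     let chunks: Vec<&str> = splitter.chunks(content, 256).collect();
```
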
2 changes: 1 addition & 1 deletion crates/tabby/Cargo.toml
@@ -68,7 +68,7 @@ vergen = { version = "8.0.0", features = ["build", "git", "gitcl"] }

[dev-dependencies]
assert-json-diff = "2.0.2"
-insta = { version = "1.34.0", features = ["yaml", "redactions"] }
+insta = { workspace = true, features = ["yaml", "redactions"] }
reqwest.workspace = true
serde-jsonlines = "0.5.0"
reqwest-eventsource = { workspace = true }