-
-
Notifications
You must be signed in to change notification settings - Fork 69
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Merge pull request #546 from metrico/feature/grafana_profiles_plugin
Feature/grafana profiles plugin
- Loading branch information
Showing
25 changed files
with
4,591 additions
and
473 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
|
@@ -39,6 +39,11 @@ jobs: | |
- run: npm run postinstall | ||
- run: git submodule init | ||
- run: git submodule update | ||
- name: Install Compose | ||
uses: ndeloof/[email protected] | ||
with: | ||
version: v2.1.0 # defaults to 'latest' | ||
legacy: true # will also install in PATH as `docker-compose` | ||
- run: docker-compose -f docker/e2e/docker-compose-cluster.yaml up -d | ||
- run: sleep 5 | ||
- name: Workflow Telemetry | ||
|
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
use crate::pattern::Pattern; | ||
use uuid::Uuid; | ||
|
||
/// In-memory collection of learned log-line patterns.
///
/// Grown on demand by `find_pattern` and rendered for inspection via
/// `to_string`.
pub struct PatternRegistry {
    // Insertion-ordered list of patterns; scanned linearly on lookup.
    patterns: Vec<Pattern>,
}
|
||
impl PatternRegistry { | ||
pub const fn new() -> PatternRegistry { | ||
PatternRegistry { patterns: Vec::new() } | ||
} | ||
|
||
pub fn find_pattern(&mut self, str_text: &Vec<String>, i_text: &Vec<u64>, sample: String) -> &Pattern { | ||
let mut idx: i32 = -1; | ||
let mut mtc = 0; | ||
for i in 0..self.patterns.len() { | ||
mtc = self.patterns[i].match_text(&i_text); | ||
if mtc == -1 || mtc > self.patterns[i].fluct { | ||
continue; | ||
} | ||
idx = i as i32; | ||
break; | ||
} | ||
|
||
if idx == -1 { | ||
let pattern = Pattern::new(Uuid::new_v4().to_string(), &i_text, &str_text, sample); | ||
self.patterns.push(pattern); | ||
idx = (self.patterns.len() - 1) as i32; | ||
} else if mtc != 0 { | ||
self.patterns[idx as usize].adjust_pattern(&i_text); | ||
} | ||
return &self.patterns[idx as usize]; | ||
} | ||
|
||
pub fn to_string(&self) -> String { | ||
let mut s = String::new(); | ||
for i in 0..self.patterns.len() { | ||
s += self.patterns[i].to_string().as_str(); | ||
s += "\n"; | ||
} | ||
return s | ||
} | ||
} | ||
|
||
// Process-wide shared registry. Every access to a `static mut` requires an
// `unsafe` block and is NOT thread-safe; callers must guarantee exclusive
// access. NOTE(review): consider `Mutex<PatternRegistry>` or `OnceLock`
// instead — confirm how callers use this before changing.
pub static mut REGISTRY: PatternRegistry = PatternRegistry::new();
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,45 @@ | ||
use regex::{Regex, CaptureMatches, Match}; | ||
|
||
/*pub fn tokenize(re: &Regex, text: &str) -> CaptureMatches { | ||
return re.captures_iter(text); | ||
}*/ | ||
|
||
/// Regex-driven tokenizer over an owned copy of the input text.
pub struct Tokenizer<'a> {
    // Owned copy of the input being tokenized.
    text: String,
    // Byte offset of the next unconsumed character in `text`.
    pos: usize,
    // Token pattern: letter/underscore runs, digit/dot runs, or runs of
    // anything else, each with trailing whitespace consumed.
    re: Regex,
    // NOTE(review): never populated (always `None`) — presumably a leftover
    // from an earlier capture-iterator design; confirm before removing.
    iter: Option<CaptureMatches<'a, 'a>>
}
|
||
impl Tokenizer<'_> { | ||
pub fn new<'a>(text: &'a str) -> Tokenizer<'a> { | ||
let mut res = Tokenizer { | ||
text: text.to_string(), | ||
pos: 0, | ||
re: Regex::new(r"([\p{L}_]+|[\d.]+|[^\p{L}_\d.]+)\s*").unwrap(), | ||
iter: None | ||
}; | ||
res | ||
} | ||
} | ||
|
||
impl Iterator for Tokenizer<'_> { | ||
type Item = String; | ||
|
||
fn next(&mut self) -> Option<Self::Item> { | ||
None | ||
/*let cap: Option<Match> = None; | ||
if let Some(c) = cap { | ||
self.pos += c.get(0).unwrap().end(); | ||
Some(c.get(0).unwrap().as_str().to_string()) | ||
} else { | ||
None | ||
}*/ | ||
} | ||
} | ||
|
||
#[test]
fn test_tokenizer() {
    // The original test constructed a tokenizer and asserted nothing,
    // leaving an unused `mut` binding. Drain the iterator and check an
    // invariant that holds whether `next` is still stubbed (yields nothing)
    // or fully implemented (tokens partition the input): the total token
    // length can never exceed the input length.
    let text = "Hello, world! 123";
    let tokens: Vec<String> = Tokenizer::new(text).collect();
    assert!(tokens.iter().map(|t| t.len()).sum::<usize>() <= text.len());
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
|
||
// Stringly-typed numeric aliases: 64-bit values are carried as strings in
// JSON, presumably to avoid JavaScript number precision loss — confirm with
// the producer.
type int64 = string;
type uint64 = string;
// Measurement unit label (free-form string).
type units = string;

// Top-level container for a flamegraph payload plus optional telemetry.
export interface Flamebearer {
  version: number,
  flamebearerProfileV1: flamebearerProfileV1
  telemetry?: {[key: string]: any}
}

// Profile payload: the flamegraph itself plus metadata, timeline data,
// per-group timelines, heatmap and diff tick boundaries.
export interface flamebearerProfileV1 {
  flamebearer: flamebearerV1,
  metadata: flamebearerMetadataV1,
  timeline: flamebearerTimelineV1,
  groups: {[key: string]: flamebearerTimelineV1}
  heatmap: heatmap,
  leftTicks: string,
  rightTicks: string,
}

// Flamegraph level data.
// NOTE(review): `levels: [[number]]` is a one-element tuple containing a
// one-element tuple — `number[][]` was likely intended; confirm.
// NOTE(review): `names: string` — comparable flamebearer schemas use a
// string array here; verify against the producer.
export interface flamebearerV1 {
  names: string,
  levels: [[number]],
  numTicks: number,
  maxSelf: number
}

// Descriptive metadata about how the profile was collected.
export interface flamebearerMetadataV1 {
  format: string,
  spyName: string,
  sampleRate: number,
  units: units,
  name: string
}

// Time-bucketed sample counts for the profile.
// NOTE(review): `samples: [uint64]` is a one-element tuple — `uint64[]` was
// likely intended; confirm.
export interface flamebearerTimelineV1 {
  startTime: int64,
  samples: [uint64]
  durationDelta: int64,
  watermarks: {[key: number]: int64}
}

// 2-D bucketed value distribution over time.
// NOTE(review): `values: [[uint64]]` — same tuple-vs-array concern as above.
export interface heatmap {
  values: [[uint64]],
  timeBuckets: int64,
  valueBuckets: int64,
  startTime: int64,
  endTime: int64,
  minValue: uint64,
  maxValue: uint64,
  minDepth: uint64,
  maxDepth: uint64
}

// One flamegraph level as a flat value array.
export interface level {
  values: number[]
}

// Result of diffing two flamegraphs.
export interface flamegraphDiff {
  name: string[],
  levels: level[],
  total: int64,
  maxSelf: int64,
  leftTicks: int64,
  rightTicks: int64
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,57 @@ | ||
const { bufferize } = require('./shared') | ||
|
||
/** | ||
* | ||
* @param req | ||
*/ | ||
/**
 * Decodes a JSON-encoded series request body and exposes it through the
 * gRPC-style getter interface the downstream handlers expect.
 *
 * @param req incoming request; tagged `json` so the response is serialized
 *   as JSON
 * @param payload raw request body stream
 * @returns getter object over start/end, matchers and label names
 */
const series = async (req, payload) => {
  const raw = await bufferize(payload)
  const parsed = JSON.parse(raw.toString())
  req.type = 'json'
  return {
    getStart: () => parsed.start,
    getEnd: () => parsed.end,
    getMatchersList: () => parsed.matchers,
    getLabelNamesList: () => parsed.labelNames
  }
}
|
||
/**
 * Stub handler for profile statistics: tags the request as JSON and
 * responds with an empty (null) body. `payload` is accepted but unused.
 */
const getProfileStats = async function (req, payload) {
  req.type = 'json'
  return null
}
|
||
/**
 * Stub handler for the settings endpoint: tags the request as JSON and
 * responds with an empty settings object. `payload` is accepted but unused.
 */
const settingsGet = async function (req, payload) {
  req.type = 'json'
  return {}
}
|
||
/**
 * Decodes a JSON-encoded label-names request body.
 *
 * @param req incoming request; tagged `json` before the body is consumed
 * @param payload raw request body stream
 * @returns getter object over start/end and the label name
 */
const labelNames = async (req, payload) => {
  req.type = 'json'
  const raw = await bufferize(payload)
  const parsed = JSON.parse(raw.toString())
  return {
    getStart: () => parsed.start,
    getEnd: () => parsed.end,
    getName: () => parsed.name
  }
}
|
||
/**
 * Decodes a JSON-encoded query-analysis request body.
 *
 * @param req incoming request; tagged `json` before the body is consumed
 * @param payload raw request body stream
 * @returns getter object over start/end and the query text
 */
const analyzeQuery = async (req, payload) => {
  req.type = 'json'
  const raw = await bufferize(payload)
  const parsed = JSON.parse(raw.toString())
  return {
    getStart: () => parsed.start,
    getEnd: () => parsed.end,
    getQuery: () => parsed.query
  }
}
|
||
// JSON request codecs for the profile API endpoints, keyed by handler name.
module.exports = {
  series,
  getProfileStats,
  labelNames,
  settingsGet,
  analyzeQuery
}
Oops, something went wrong.