
Commit

refactor: update
Siumauricio committed Dec 29, 2024
1 parent 0709be9 commit c0c12a1
Showing 3 changed files with 42 additions and 42 deletions.
1 change: 0 additions & 1 deletion apps/monitoring/src/index.ts
@@ -1,7 +1,6 @@
import { serve } from "@hono/node-server";
import { Hono } from "hono";
import { cors } from "hono/cors";
import { bearerAuth } from "hono/bearer-auth";
import { logServerMetrics } from "./socket.js";
import { config } from "dotenv";
import { serverLogFile } from "./constants.js";
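For context, these imports are usually wired together along the following lines. This is a minimal sketch, not the file's actual contents: the route and port are assumptions, and the real handlers live in the collapsed part of index.ts.

```ts
import { serve } from "@hono/node-server";
import { Hono } from "hono";
import { cors } from "hono/cors";

const app = new Hono();
app.use("*", cors());

// Hypothetical route; the real endpoints are not shown in this diff.
app.get("/health", (c) => c.json({ status: "ok" }));

serve({ fetch: app.fetch, port: 3001 });
```

Given that the file records exactly one deleted line, the removed import is plausibly `bearerAuth`, though the collapsed portion of the diff does not confirm it.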
5 changes: 0 additions & 5 deletions apps/monitoring/src/socket.ts
@@ -1,10 +1,7 @@
import fs from "node:fs";
import path from "node:path";
import Docker from "dockerode";
import si from "systeminformation";
import { config } from "dotenv";
import { containerLogFile, serverLogFile } from "./constants.js";
config();

const docker = new Docker();

@@ -48,7 +45,6 @@ const getServerMetrics = async () => {
timestamp: new Date().toISOString(),
};
};
// /Users/mauricio/Documents/Github/Personal/dokploy/apps/dokploy/.docker
export const logServerMetrics = () => {
setInterval(async () => {
const metrics = await getServerMetrics();
@@ -63,7 +59,6 @@ export const logServerMetrics = () => {
}, 5000);
};

// === 2. Container Metrics ===
export const logContainerMetrics = () => {
setInterval(async () => {
try {
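The visible hunks sketch this file's shape: `getServerMetrics` returns a snapshot ending in an ISO `timestamp`, and `logServerMetrics` appends a snapshot every 5 seconds. Below is a minimal, self-contained sketch of that pattern; the `systeminformation` fields and the log path are assumptions, since the actual collection code is collapsed.

```ts
import fs from "node:fs";
import si from "systeminformation";

// Hypothetical path; the real value is serverLogFile from ./constants.js.
const serverLogFile = "/tmp/server-metrics.log";

const getServerMetrics = async () => {
	// Assumed fields; the real ones are in the collapsed part of the hunk.
	const [cpu, mem] = await Promise.all([si.currentLoad(), si.mem()]);
	return {
		cpu: cpu.currentLoad.toFixed(2),
		memUsedGiB: (mem.active / 1024 ** 3).toFixed(2),
		timestamp: new Date().toISOString(),
	};
};

export const logServerMetrics = () => {
	setInterval(async () => {
		const metrics = await getServerMetrics();
		// One JSON object per line (NDJSON), the format parseLog in utils.ts expects.
		fs.appendFile(serverLogFile, `${JSON.stringify(metrics)}\n`, (err) => {
			if (err) console.error("failed to append metrics:", err);
		});
	}, 5000);
};
```

`logContainerMetrics` presumably follows the same interval-and-append pattern using the `dockerode` client (`docker`) declared above, but its body is collapsed in this diff.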
78 changes: 42 additions & 36 deletions apps/monitoring/src/utils.ts
@@ -1,4 +1,4 @@
-import { createReadStream, statSync } from "node:fs";
+import { statSync } from "node:fs";
import fs from "node:fs/promises";

// Cache configuration for metrics
@@ -13,7 +13,7 @@ const CACHE_TTL = 5000; // 5 seconds TTL
*/
export function parseLog(logContent: string) {
if (!logContent.trim()) return [];

const lines = logContent.trim().split("\n");
return lines.map((line) => {
try {
@@ -76,43 +76,46 @@ export function filterByTimestamp(
* @returns Array of parsed metric objects
*/
async function readLastNLines(filePath: string, limit: number) {
-  const { size } = statSync(filePath);
-  const chunkSize = Math.min(size, limit * 200); // Estimate 200 bytes per line
-  const buffer = Buffer.alloc(chunkSize);
-  const fd = await fs.open(filePath, 'r');
-  try {
-    await fd.read(buffer, 0, chunkSize, size - chunkSize);
-    const content = buffer.toString('utf8');
-    const lines = content.split('\n').filter(line => line.trim());
-    const lastLines = lines.slice(-limit);
-    return lastLines.map(line => {
-      try {
-        return JSON.parse(line);
-      } catch {
-        return { raw: line };
-      }
-    });
-  } finally {
-    await fd.close();
-  }
+	const { size } = statSync(filePath);
+	const chunkSize = Math.min(size, limit * 200); // Estimate 200 bytes per line
+	const buffer = Buffer.alloc(chunkSize);
+
+	const fd = await fs.open(filePath, "r");
+	try {
+		await fd.read(buffer, 0, chunkSize, size - chunkSize);
+		const content = buffer.toString("utf8");
+		const lines = content.split("\n").filter((line) => line.trim());
+		const lastLines = lines.slice(-limit);
+
+		return lastLines.map((line) => {
+			try {
+				return JSON.parse(line);
+			} catch {
+				return { raw: line };
+			}
+		});
+	} finally {
+		await fd.close();
+	}
}
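The rewritten `readLastNLines` is the tail-read optimization referenced in the doc comment below: instead of reading the whole file, it estimates about 200 bytes per line, reads only that many bytes from the end, and JSON-parses the complete lines it finds. The estimate is a heuristic, so entries longer than roughly 200 bytes can make the function return fewer than `limit` lines. A hypothetical call (the path is an assumption):

```ts
// Fetch roughly the 50 most recent entries without loading the whole log.
const recent = await readLastNLines("/var/log/monitoring/server.log", 50);
console.log(recent.at(-1)?.timestamp);
```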

/**
* Process metrics from a file with optimized strategies:
* - For limit-only queries: Read only required bytes from end of file
* - For full file or date filters: Use in-memory cache with TTL
*
*
* @param filePath Path to the metrics file
* @param options Query options (limit, start date, end date)
* @returns Processed metrics based on the options
*/
-export async function processMetricsFromFile(filePath: string, options: {
-  start?: string;
-  end?: string;
-  limit?: number;
-}) {
+export async function processMetricsFromFile(
+	filePath: string,
+	options: {
+		start?: string;
+		end?: string;
+		limit?: number;
+	},
+) {
const { start, end, limit } = options;

// For limit-only queries, use optimized tail reading
@@ -127,7 +130,7 @@ export async function processMetricsFromFile(filePath: string, options: {
return processMetrics(metrics, options);
}

-	const content = await fs.readFile(filePath, 'utf8');
+	const content = await fs.readFile(filePath, "utf8");
const metrics = parseLog(content);
metricsCache.set(filePath, metrics);
lastCacheUpdate = now;
@@ -141,19 +144,22 @@ export async function processMetricsFromFile(filePath: string, options: {
* @param options Processing options (limit, start date, end date)
* @returns Processed metrics based on the options
*/
-export function processMetrics(metrics: any[], options: {
-  start?: string;
-  end?: string;
-  limit?: number;
-}) {
+export function processMetrics(
+	metrics: any[],
+	options: {
+		start?: string;
+		end?: string;
+		limit?: number;
+	},
+) {
const { start, end, limit } = options;

// First filter by timestamp
const filteredMetrics = filterByTimestamp(metrics, start, end);

// If limit is 0, return empty array
if (limit === 0) return [];

// If there's a limit > 0, apply it
return limit && limit > 0 ? filteredMetrics.slice(-limit) : filteredMetrics;
}
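To tie the pieces together, a hypothetical usage sketch; the file path and date range are assumptions:

```ts
const logFile = "/var/log/monitoring/server.log";

// Limit-only query: served by the tail read in readLastNLines, no cache.
const latest = await processMetricsFromFile(logFile, { limit: 10 });

// Date-filtered query: full parse, cached in memory for CACHE_TTL (5 s).
const window = await processMetricsFromFile(logFile, {
	start: "2024-12-29T00:00:00.000Z",
	end: "2024-12-29T12:00:00.000Z",
});

// processMetrics also works on an already-parsed array; limit 0 returns [].
const none = processMetrics(window, { limit: 0 });
```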
