Skip to content

Commit

Permalink
Merge pull request #176 from mytlogos/status-page
Browse files Browse the repository at this point in the history
add status page
  • Loading branch information
mytlogos authored Dec 16, 2021
2 parents 09f4175 + f95bf32 commit 5e2de1a
Show file tree
Hide file tree
Showing 12 changed files with 536 additions and 11 deletions.
4 changes: 4 additions & 0 deletions packages/core/src/database/contexts/databaseContext.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,10 @@ export class DatabaseContext extends SubContext {
return this.query("SELECT version FROM enterprise_database_info LIMIT 1;");
}

/**
 * Queries the database server for its own version string (e.g. the
 * MariaDB server version), as opposed to the enterprise schema version.
 */
public getServerVersion(): Promise<[{ version: string }]> {
  const versionQuery = "SELECT version() as version";
  return this.query(versionQuery);
}

public async startMigration(): Promise<boolean> {
return (
this.query("UPDATE enterprise_database_info SET migrating=1;")
Expand Down
2 changes: 2 additions & 0 deletions packages/core/src/database/storages/storage.ts
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ import { ScraperHookContext } from "../contexts/scraperHookContext";
import { SubContext } from "../contexts/subContext";
import { AppEventContext } from "../contexts/appEventContext";
import { CustomHookContext } from "../contexts/customHookContext";
import { DatabaseContext } from "../contexts/databaseContext";

function inContext<T>(callback: ContextCallback<T, QueryContext>, transaction = true) {
return storageInContext(callback, (con) => queryContextProvider(con), transaction);
Expand Down Expand Up @@ -450,6 +451,7 @@ export function createStorage<T extends SubContext, K extends StringKeys<T> = ke
}

export const storage = new Storage();
// One storage facade per sub-context; each delegates to the shared
// connection pool via createStorage (see createStorage above).
export const databaseStorage = createStorage<DatabaseContext>("databaseContext");
export const mediumStorage = createStorage<MediumContext>("mediumContext");
export const partStorage = createStorage<PartContext>("partContext");
export const episodeStorage = createStorage<EpisodeContext>("episodeContext");
Expand Down
5 changes: 5 additions & 0 deletions packages/scraper/src/externals/hookManager.ts
Original file line number Diff line number Diff line change
Expand Up @@ -189,6 +189,7 @@ function registerHooks(hook: Hook[] | Hook): void {
newsAdapter.push(value.newsAdapter);
}
if (value.searchAdapter) {
value.searchAdapter.hookName = value.name;
searchAdapter.push(value.searchAdapter);
}
if (value.domainReg) {
Expand Down Expand Up @@ -230,6 +231,10 @@ export function getSearcher(medium: MediaType): SearchScraper[] {
return searchAdapter.filter((searcher) => searcher.medium & medium);
}

/**
 * Returns every registered search adapter, regardless of medium.
 * A defensive copy is returned so callers cannot mutate the registry.
 */
export function getAllSearcher(): SearchScraper[] {
  return searchAdapter.slice();
}

/**
 * Returns all registered news scrapers as a shallow copy,
 * keeping the internal registry safe from caller mutation.
 */
export function getNewsAdapter(): NewsScraper[] {
  return Array.from(newsAdapter);
}
Expand Down
1 change: 1 addition & 0 deletions packages/scraper/src/externals/types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -337,6 +337,7 @@ export interface SearchScraper {
(text: string, medium: number): Promise<SearchResult[]>;

medium: MediaType;
hookName?: string;
}

export interface TocScraper {
Expand Down
79 changes: 79 additions & 0 deletions packages/scraper/src/startCrawler.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,17 @@ import os from "os";
import debug from "debug";
import env from "enterprise-core/dist/env";
import { register, collectDefaultMetrics } from "prom-client";
import {
episodeDownloaderEntries,
getAllSearcher,
getHooks,
getNewsAdapter,
tocDiscoveryEntries,
tocScraperEntries,
} from "./externals/hookManager";
import path from "path";
import { readFileSync } from "fs";

collectDefaultMetrics({
labels: {
NODE_APP_INSTANCE: "enterprise-crawler",
Expand All @@ -17,6 +28,7 @@ collectDefaultMetrics({

// start websocket server
import "./websocket";

const debugMessenger = debug("enterprise-lister:crawler");
logger.info(`Process PID: ${process.pid} in environment '${process.env.NODE_ENV}'`);
// first start storage, then crawler, as crawler depends on storage
Expand All @@ -26,6 +38,12 @@ startCrawler();
const status = new AppStatus("crawler");
status.start();

/**
 * Maps a [pattern, scraper] registry entry to a serializable summary.
 *
 * @param entry tuple of the URL pattern and the scraper registered for it
 * @returns the RegExp rendered via its toString (e.g. "/foo/i") plus the
 *          owning hook's name, if the scraper was tagged with one
 */
function scraperEntry<T extends { hookName?: string }>(entry: [RegExp, T]): { pattern: string; name?: string } {
  const [pattern, scraper] = entry;
  return { pattern: String(pattern), name: scraper.hookName };
}
/**
* Create HTTP server.
*/
Expand All @@ -42,6 +60,67 @@ const server: Server = createServer((req, res) => {
});
return;
}
// Crawler status report: system metrics, package version, non-secret config
// and all registered scraper hooks, serialized as JSON.
if (req.url === "/status") {
  const packageJsonPath = path.join(path.dirname(__dirname), "package.json");

  let packageJson: any;

  try {
    const packageString = readFileSync(packageJsonPath, { encoding: "utf8" });
    packageJson = JSON.parse(packageString);
  } catch (error) {
    // FIX: fall back on the "version" key — that is what is read below as
    // project_version. The previous fallback ({ project_version: "Error" })
    // left packageJson.version undefined, so the error was never surfaced.
    packageJson = { version: "Error" };
  }
  res.write(
    stringify({
      cpu_average: os.loadavg(),
      memory: process.memoryUsage(),
      freemem: os.freemem(),
      totalmem: os.totalmem(),
      uptime: os.uptime(),
      project_version: packageJson.version,
      node_version: process.version,
      // deliberately excludes dbPassword — do not add secrets here
      config: {
        dbConLimit: env.dbConLimit,
        dbHost: env.dbHost,
        dbUser: env.dbUser,
        dbPort: env.dbPort,
        crawlerHost: env.crawlerHost,
        crawlerPort: env.crawlerPort,
        crawlerWSPort: env.crawlerWSPort,
        port: env.port,
        measure: env.measure,
        development: env.development,
        stopScrapeEvents: env.stopScrapeEvents,
      },
      hooks: {
        all: getHooks().map((hook) => {
          return {
            name: hook.name,
            medium: hook.medium,
            domain: hook.domainReg + "",
          };
        }),
        toc: tocScraperEntries().map(scraperEntry),
        download: episodeDownloaderEntries().map(scraperEntry),
        search: getAllSearcher().map((entry) => {
          return {
            name: entry.hookName,
          };
        }),
        tocSearch: tocDiscoveryEntries().map(scraperEntry),
        news: getNewsAdapter().map((entry) => {
          return {
            link: entry.link,
            name: entry.hookName,
          };
        }),
      },
    }),
  );
  res.end();
  return;
}
const stores = getStores();
res.setHeader("Content-Type", "application/json; charset=utf-8");
res.setHeader("Access-Control-Allow-Origin", "*");
Expand Down
4 changes: 2 additions & 2 deletions packages/server/src/api/crawler.ts
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
import { Router } from "express";
import request from "request";
import request from "request-promise-native";
import { createHandler } from "./apiTools";
import env from "enterprise-core/dist/env";
import Websocket from "ws";

const getJobs = createHandler(() => {
return request.get("http://" + env.crawlerHost + ":3000/");
return request.get("http://" + env.crawlerHost + ":" + env.crawlerPort + "/");
});

const liveSockets = [] as Websocket[];
Expand Down
99 changes: 99 additions & 0 deletions packages/server/src/api/user.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import {
appEventStorage,
databaseStorage,
episodeStorage,
jobStorage,
mediumStorage,
Expand Down Expand Up @@ -41,6 +42,12 @@ import { mediumRouter } from "./medium";
import { newsRouter } from "./news";
import { processRouter } from "./process";
import { crawlerRouter } from "./crawler";
import { CrawlerStatus, DatabaseStatus, Status } from "../types";
import os from "os";
import { readFile } from "fs/promises";
import path from "path";
import appConfig from "enterprise-core/dist/env";
import requestPromise from "request-promise-native";

export const authenticate: Handler = (req, res, next) => {
let { uuid, session } = req.body;
Expand Down Expand Up @@ -288,6 +295,97 @@ export const getAllAppEvents = createHandler((req) => {
return appEventStorage.getAppEvents(filter);
});

/**
 * Probes the database by asking it for its server version.
 *
 * @returns "available" with the reported version on success,
 *          "unavailable" if the query throws for any reason
 */
async function getDatabaseStatus(): Promise<DatabaseStatus> {
  const host = `${appConfig.dbHost}:${appConfig.dbPort}`;
  try {
    const [dbVersion] = await databaseStorage.getServerVersion();
    return {
      status: "available",
      host,
      type: "mariadb",
      version: dbVersion.version,
    };
  } catch (error) {
    return {
      status: "unavailable",
      host,
      type: "mariadb",
    };
  }
}

/**
 * Probes the crawler's /status endpoint with a short timeout.
 *
 * @returns the crawler's own status object marked "available",
 *          "invalid" when the body is not a JSON object,
 *          "unavailable" when the request fails or times out
 */
async function getCrawlerStatus(): Promise<CrawlerStatus> {
  try {
    const status = await requestPromise.get({
      url: `http://${appConfig.crawlerHost}:${appConfig.crawlerPort}/status`,
      timeout: 500, // milliseconds
    });

    // FIX: validate the JSON.parse result as well — previously a string body
    // like "5" or "[...]" was either cast to CrawlerStatus unchecked or threw
    // inside this try and was misreported as "unavailable".
    const parsed: unknown = isString(status) ? JSON.parse(status) : status;

    if (!parsed || typeof parsed !== "object" || Array.isArray(parsed)) {
      return {
        status: "invalid",
      };
    }

    const statusObject = parsed as CrawlerStatus;
    statusObject.status = "available";
    return statusObject;
  } catch (error) {
    return {
      status: "unavailable",
    };
  }
}

/**
 * GET /api/user/status — aggregated status of server, crawler and database.
 * The two probes and the package.json read run in parallel; a rejected
 * readFile propagates to createHandler's error handling, as before.
 */
const getStatus = createHandler(async (): Promise<Status> => {
  const packageJsonPath = path.join(path.dirname(path.dirname(__dirname)), "package.json");

  const [database, crawler, packageString] = await Promise.all([
    getDatabaseStatus(),
    getCrawlerStatus(),
    readFile(packageJsonPath, { encoding: "utf8" }),
  ]);

  let packageJson: any;

  try {
    packageJson = JSON.parse(packageString);
  } catch (error) {
    // FIX: fall back on the "version" key — project_version below reads
    // packageJson.version, so the previous { project_version: "Error" }
    // fallback produced undefined instead of "Error".
    packageJson = { version: "Error" };
  }

  return {
    crawler,
    database,
    server: {
      cpu_average: os.loadavg(),
      memory: process.memoryUsage(),
      freemem: os.freemem(),
      totalmem: os.totalmem(),
      uptime: os.uptime(),
      project_version: packageJson.version,
      node_version: process.version,
      // deliberately excludes dbPassword — see ServerStatus["config"]
      config: {
        dbConLimit: appConfig.dbConLimit,
        dbHost: appConfig.dbHost,
        dbUser: appConfig.dbUser,
        dbPort: appConfig.dbPort,
        crawlerHost: appConfig.crawlerHost,
        crawlerPort: appConfig.crawlerPort,
        crawlerWSPort: appConfig.crawlerWSPort,
        port: appConfig.port,
        measure: appConfig.measure,
        development: appConfig.development,
        stopScrapeEvents: appConfig.stopScrapeEvents,
      },
    },
  };
});

/**
* Creates the User Api Router.
*
Expand Down Expand Up @@ -792,6 +890,7 @@ export function userRouter(): Router {
* description: List array
*/
router.get("/events", getAllAppEvents);
router.get("/status", getStatus);

router.use("/medium", mediumRouter());
router.use("/jobs", jobsRouter());
Expand Down
48 changes: 48 additions & 0 deletions packages/server/src/types.ts
Original file line number Diff line number Diff line change
@@ -1,7 +1,55 @@
import { HookConfig } from "enterprise-scraper/dist/externals/custom/types";
import env from "enterprise-core/dist/env";
import { memoryUsage } from "process";

/** Payload for testing a single hook-config entry against a sample parameter. */
export interface HookTest {
  config: HookConfig;
  key: keyof HookConfig;
  param: string;
}

/** Aggregated status report returned by GET /api/user/status. */
export interface Status {
  server: ServerStatus;
  crawler: CrawlerStatus;
  database: DatabaseStatus;
}

/**
 * Summary of the scraper hooks registered in the crawler, keyed by
 * adapter kind. `pattern` fields are RegExp sources rendered as strings.
 */
export interface HookStatus {
  all: Array<{ name: string; domain: string; medium: number }>;
  toc: Array<{ name: string; pattern: string }>;
  download: Array<{ name: string; pattern: string }>;
  search: Array<{ name: string }>;
  tocSearch: Array<{ name: string; pattern: string }>;
  news: Array<{ name: string; link: string }>;
}

/**
 * Status of the crawler service. When reachable ("available") it carries
 * the crawler's own ServerStatus plus its registered hooks.
 */
export type CrawlerStatus =
  | ({
      status: "available";
      hooks: HookStatus;
    } & ServerStatus)
  // NOTE(review): "timout" looks like a typo for "timeout" — fix it together
  // with every producer/consumer of this literal (incl. the website) in one change.
  | { status: "timout" | "unavailable" | "invalid" };

/**
 * Status of the database server; `version` is only known when reachable.
 * `host` is "host:port", `type` is e.g. "mariadb".
 */
export type DatabaseStatus =
  | {
      status: "available";
      type: string;
      version: string;
      host: string;
    }
  // NOTE(review): "timout" looks like a typo for "timeout" — fix it together
  // with every producer/consumer of this literal in one change.
  | {
      status: "timout" | "unavailable";
      type: string;
      host: string;
    };

/**
 * Process- and host-level metrics for one service instance.
 * `config` mirrors the env configuration minus the database password,
 * so this type must never re-add secrets.
 */
export interface ServerStatus {
  // os.loadavg(): 1/5/15-minute load averages
  cpu_average: [number, number, number] | number[];
  memory: ReturnType<typeof memoryUsage>;
  uptime: number;
  freemem: number;
  totalmem: number;
  project_version: string;
  node_version: string;
  config: Omit<typeof env, "dbPassword">;
}
9 changes: 8 additions & 1 deletion packages/website/src/Httpclient.ts
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ import {
JobHistoryItem,
} from "./siteTypes";
import { AddPart, AppEvent, AppEventFilter, EmptyPromise, JobStatSummary } from "enterprise-core/src/types";
import { HookTest } from "enterprise-server/src/types";
import { HookTest, Status } from "enterprise-server/src/types";
import { CustomHook } from "enterprise-core/dist/types";

/**
Expand Down Expand Up @@ -61,6 +61,9 @@ const restApi = createRestDefinition({
events: {
get: true,
},
status: {
get: true,
},
jobs: {
get: true,
enable: {
Expand Down Expand Up @@ -532,6 +535,10 @@ export const HttpClient = {
return this.queryServer(serverRestApi.api.user.hook.custom.get);
},

/**
 * Fetches the aggregated server/crawler/database status report
 * from GET /api/user/status.
 */
getStatus(): Promise<Status> {
  return this.queryServer(serverRestApi.api.user.status.get);
},

getAllMediaInWaits(search?: MediumInWaitSearch): Promise<MediumInWait[]> {
return this.queryServer(serverRestApi.api.user.medium.unused.get, search);
},
Expand Down
Loading

0 comments on commit 5e2de1a

Please sign in to comment.