Skip to content

Commit

Permalink
infinite scroll
Browse files Browse the repository at this point in the history
  • Loading branch information
wardviaene committed Sep 6, 2024
1 parent 6ae208a commit 0ce2e6e
Show file tree
Hide file tree
Showing 5 changed files with 114 additions and 73 deletions.
100 changes: 60 additions & 40 deletions pkg/rest/stats.go
Original file line number Diff line number Diff line change
Expand Up @@ -13,10 +13,11 @@ import (
"strings"
"time"

"github.com/in4it/wireguard-server/pkg/storage"
"github.com/in4it/wireguard-server/pkg/wireguard"
)

const MAX_LOG_OUTPUT_LINES = 5
const MAX_LOG_OUTPUT_LINES = 100

func (c *Context) userStatsHandler(w http.ResponseWriter, r *http.Request) {
if r.PathValue("date") == "" {
Expand Down Expand Up @@ -248,6 +249,13 @@ func (c *Context) packetLogsHandler(w http.ResponseWriter, r *http.Request) {
offset = i
}
}
pos := int64(0)
if r.FormValue("pos") != "" {
i, err := strconv.ParseInt(r.FormValue("pos"), 10, 0)
if err == nil {
pos = i
}
}
// get all users
users := c.UserStore.ListUsers()
userMap := make(map[string]string)
Expand Down Expand Up @@ -278,53 +286,51 @@ func (c *Context) packetLogsHandler(w http.ResponseWriter, r *http.Request) {
if !dateEqual(time.Now(), date) { // date is in local timezone, and we are UTC, so also read next file
statsFiles = append(statsFiles, path.Join(wireguard.VPN_STATS_DIR, wireguard.VPN_PACKETLOGGER_DIR, userID+"-"+date.AddDate(0, 0, 1).Format("2006-01-02")+".log"))
}
logInputData := bytes.NewBuffer([]byte{})
//OpenFilesFromPos(statsFiles, 0) ([]io.Reader, error)
for _, statsFile := range statsFiles {
if c.Storage.Client.FileExists(statsFile) {
fileLogData, err := c.Storage.Client.ReadFile(statsFile)
if err != nil {
c.returnError(w, fmt.Errorf("readfile error: %s", err), http.StatusBadRequest)
return
}
logInputData.Write(fileLogData)
}
}

pos := int64(0)
scanner := bufio.NewScanner(logInputData)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
advance, token, err = bufio.ScanLines(data, atEOF)
pos += int64(advance)
statsFiles = filterNonExistentFiles(c.Storage.Client, statsFiles)
fileReaders, err := c.Storage.Client.OpenFilesFromPos(statsFiles, pos)
if err != nil {
c.returnError(w, fmt.Errorf("error while reading files: %s", err), http.StatusBadRequest)
return
})
}
for _, fileReader := range fileReaders {
defer fileReader.Close()
}

for scanner.Scan() && len(logData.Data) < MAX_LOG_OUTPUT_LINES {
inputSplit := strings.Split(scanner.Text(), ",")
timestamp, err := time.Parse(wireguard.TIMESTAMP_FORMAT, inputSplit[0])
if err != nil {
continue // invalid record
for _, logInputData := range fileReaders { // read multiple files
if len(logData.Data) >= MAX_LOG_OUTPUT_LINES {
break
}
timestamp = timestamp.Add(time.Duration(offset) * time.Minute)
if dateEqual(timestamp, date) {
if !filterLogRecord(logTypeFilter, inputSplit[1]) {
row := LogRow{
Timestamp: timestamp.Format("2006-01-02 15:04:05"),
Data: inputSplit[1:],
scanner := bufio.NewScanner(logInputData)
scanner.Split(func(data []byte, atEOF bool) (advance int, token []byte, err error) {
advance, token, err = bufio.ScanLines(data, atEOF)
pos += int64(advance)
return
})
for scanner.Scan() && len(logData.Data) < MAX_LOG_OUTPUT_LINES { // read multiple lines
inputSplit := strings.Split(scanner.Text(), ",")
timestamp, err := time.Parse(wireguard.TIMESTAMP_FORMAT, inputSplit[0])
if err != nil {
continue // invalid record
}
timestamp = timestamp.Add(time.Duration(offset) * time.Minute)
if dateEqual(timestamp, date) {
if !filterLogRecord(logTypeFilter, inputSplit[1]) {
row := LogRow{
Timestamp: timestamp.Format("2006-01-02 15:04:05"),
Data: inputSplit[1:],
}
logData.Data = append(logData.Data, row)
}
logData.Data = append(logData.Data, row)
}
}
}

if err := scanner.Err(); err != nil {
c.returnError(w, fmt.Errorf("log file read (scanner) error: %s", err), http.StatusBadRequest)
return
} else {
if len(logData.Data) < MAX_LOG_OUTPUT_LINES { // todo: and check if it is last file
pos = -1 // no more records
if err := scanner.Err(); err != nil {
c.returnError(w, fmt.Errorf("log file read (scanner) error: %s", err), http.StatusBadRequest)
return
}
}
if len(logData.Data) < MAX_LOG_OUTPUT_LINES {
pos = -1 // no more records
}

// set position
logData.NextPos = pos
Expand All @@ -345,6 +351,16 @@ func (c *Context) packetLogsHandler(w http.ResponseWriter, r *http.Request) {
c.write(w, out)
}

// filterNonExistentFiles returns only the entries of files that exist
// according to the supplied storage client. Missing paths are silently
// dropped, so callers may pass speculative candidates (e.g. today's and
// the next day's log file names) without checking existence themselves.
// The input slice is not modified; order of surviving entries is preserved.
func filterNonExistentFiles(client storage.Iface, files []string) []string {
	// Renamed parameter from "storage" to "client": the old name shadowed
	// the imported storage package used in this very signature.
	res := make([]string, 0, len(files)) // pre-size: result is bounded by len(files)
	for _, file := range files {
		if client.FileExists(file) {
			res = append(res, file)
		}
	}
	return res
}

func getColor(i int) string {
colors := []string{
"#DEEFB7",
Expand Down Expand Up @@ -384,6 +400,10 @@ func filterLogRecord(logTypeFilter []string, logType string) bool {
return false
}

if logTypeFilterItem == "dns" && logType == "udp" {
return false
}

splitLogTypes := strings.Split(logTypeFilterItem, "+")
for _, splitLogType := range splitLogTypes {
if splitLogType == logType {
Expand Down
2 changes: 1 addition & 1 deletion pkg/storage/iface.go
Original file line number Diff line number Diff line change
Expand Up @@ -21,5 +21,5 @@ type ReadWriter interface {
}

type Seeker interface {
OpenFilesFromPos(names []string, pos int64) ([]io.Reader, error)
OpenFilesFromPos(names []string, pos int64) ([]io.ReadCloser, error)
}
8 changes: 2 additions & 6 deletions pkg/storage/local/read.go
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,13 @@ func (l *LocalStorage) ReadFile(name string) ([]byte, error) {
return os.ReadFile(path.Join(l.path, name))
}

func (l *LocalStorage) OpenFilesFromPos(names []string, pos int64) ([]io.Reader, error) {
readers := []io.Reader{}
func (l *LocalStorage) OpenFilesFromPos(names []string, pos int64) ([]io.ReadCloser, error) {
readers := []io.ReadCloser{}
for _, name := range names {
file, err := os.Open(path.Join(l.path, name))
if err != nil {
return nil, fmt.Errorf("cannot open file (%s): %s", name, err)
}
defer file.Close()
stat, err := file.Stat()
if err != nil {
return nil, fmt.Errorf("cannot get file stat (%s): %s", name, err)
Expand All @@ -34,8 +33,5 @@ func (l *LocalStorage) OpenFilesFromPos(names []string, pos int64) ([]io.Reader,
readers = append(readers, file)
}
}
if len(readers) == 0 {
return nil, fmt.Errorf("no file contents to read")
}
return readers, nil
}
2 changes: 1 addition & 1 deletion pkg/testing/mocks/storage.go
Original file line number Diff line number Diff line change
Expand Up @@ -115,6 +115,6 @@ func (m *MockMemoryStorage) Remove(name string) error {
return nil
}

func (m *MockMemoryStorage) OpenFilesFromPos(names []string, pos int64) ([]io.Reader, error) {
func (m *MockMemoryStorage) OpenFilesFromPos(names []string, pos int64) ([]io.ReadCloser, error) {
return nil, fmt.Errorf("not implemented")
}
75 changes: 50 additions & 25 deletions webapp/src/Routes/PacketLogs/PacketLogs.tsx
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
import { Card, Container, Text, Table, Title, Button, Grid, Select, MultiSelect, Popover} from "@mantine/core";
import { Card, Container, Text, Table, Title, Button, Grid, Select, MultiSelect, Popover, Group} from "@mantine/core";
import { AppSettings } from "../../Constants/Constants";
import { keepPreviousData, useQuery } from "@tanstack/react-query";
import { useInfiniteQuery, useQuery, useQueryClient } from "@tanstack/react-query";
import { useAuthContext } from "../../Auth/Auth";
import { Link, useParams, useSearchParams } from "react-router-dom";
import { Link, useSearchParams } from "react-router-dom";
import { TbSettings } from "react-icons/tb";
import { DatePickerInput } from "@mantine/dates";
import { useState } from "react";
import { useEffect, useState } from "react";
import React from "react";

type LogsDataResponse = {
enabled: boolean;
Expand All @@ -16,6 +17,7 @@ type LogsDataResponse = {
type LogData = {
schema: LogDataSchema;
rows: LogRow[];
nextPos: number;
}
type LogDataSchema = {
columns: string[];
Expand Down Expand Up @@ -44,11 +46,10 @@ export function PacketLogs() {
const [logType, setLogType] = useState<string[]>([])
const [logsDate, setLogsDate] = useState<Date | null>(dateParam === null ? new Date() : new Date(dateParam));
const [user, setUser] = useState<string>(userParam === null ? "all" : userParam)
const [page, setPage] = useState(1)
const { isPending, error, data } = useQuery<LogsDataResponse>({
queryKey: ['packetlogs', user, logsDate, logType, page],
queryFn: () =>
fetch(AppSettings.url + '/stats/packetlogs/'+(user === undefined || user === "" ? "all" : user)+'/'+(logsDate == undefined ? getDate(new Date()) : getDate(logsDate)) + "?offset="+timezoneOffset+"&logtype="+encodeURIComponent(logType.join(",")), {
const { isPending, fetchNextPage, hasNextPage, error, data } = useInfiniteQuery<LogsDataResponse>({
queryKey: ['packetlogs', user, logsDate, logType],
queryFn: async ({ pageParam }) =>
fetch(AppSettings.url + '/stats/packetlogs/'+(user === undefined || user === "" ? "all" : user)+'/'+(logsDate == undefined ? getDate(new Date()) : getDate(logsDate)) + "?pos="+pageParam+"&offset="+timezoneOffset+"&logtype="+encodeURIComponent(logType.join(",")), {
headers: {
"Content-Type": "application/json",
"Authorization": "Bearer " + authInfo.token
Expand All @@ -57,24 +58,40 @@ export function PacketLogs() {
return res.json()
}
),
placeholderData: page == 1 ? undefined : keepPreviousData,
initialPageParam: 0,
getNextPageParam: (lastRequest) => lastRequest.logData.nextPos === -1 ? null : lastRequest.logData.nextPos,
})

useEffect(() => {
const handleScroll = () => {
const { scrollTop, clientHeight, scrollHeight } =
document.documentElement;
if (scrollTop + clientHeight >= scrollHeight - 20) {
fetchNextPage();
}
};

window.addEventListener("scroll", handleScroll);
return () => {
window.removeEventListener("scroll", handleScroll);
};
}, [fetchNextPage])

if(isPending) return "Loading..."
if(error) return 'A backend error has occurred: ' + error.message

if(!data.enabled || data.logTypes.length == 0) { // show disabled page if not enabled
if(data.pages.length === 0 || !data.pages[0].enabled || data.pages[0].logTypes.length == 0) { // show disabled page if not enabled
return (
<Container my={40}>
<Title ta="center" style={{marginBottom: 20}}>
Packet Logs
</Title>
<Card withBorder radius="md" padding="xl" bg="var(--mantine-color-body)">
<Text fz="xs" tt="uppercase" fw={700} c="dimmed">
{ !data.enabled ?
{ !data.pages[0].enabled ?
"Packet Logs are not activated. Activate packet logging in the VPN Settings."
:
data.logTypes.length == 0 ? "Packet logs are activated, but no packet logging types are selected. Select at least one packet log type." : null
data.pages[0].logTypes.length == 0 ? "Packet logs are activated, but no packet logging types are selected. Select at least one packet log type." : null
}
</Text>
<Card.Section inheritPadding mt="sm" pb="md">
Expand All @@ -89,15 +106,19 @@ export function PacketLogs() {
)
}

const rows = data.logData.rows.map((row, i) => (
<Table.Tr key={i}>
<Table.Td>{row.t}</Table.Td>
{row.d.map((element, y) => {
return (
<Table.Td key={i+"-"+y}>{element}</Table.Td>
)
})}
</Table.Tr>
const rows = data.pages.map((group, groupIndex) => (
<React.Fragment key={groupIndex}>
{group.logData.rows.map((row, i) => (
<Table.Tr key={i}>
<Table.Td>{row.t}</Table.Td>
{row.d.map((element, y) => {
return (
<Table.Td key={i+"-"+y}>{element}</Table.Td>
)
})}
</Table.Tr>
))}
</React.Fragment>
));
return (
<Container my={40} size="80rem">
Expand All @@ -115,9 +136,9 @@ export function PacketLogs() {
</Grid.Col>
<Grid.Col span={2}>
<Select
data={Object.keys(data.users).map((key) => {
data={Object.keys(data.pages[0].users).map((key) => {
return {
label: data.users[key],
label: data.pages[0].users[key],
value: key,
}
})}
Expand All @@ -139,7 +160,7 @@ export function PacketLogs() {
searchable
hidePickedOptions
comboboxProps={{ offset: 0, withinPortal: false}}
data={data.logTypes}
data={data.pages[0].logTypes}
value={logType}
onChange={setLogType}
size="xs"
Expand Down Expand Up @@ -169,6 +190,10 @@ export function PacketLogs() {
}
</Table.Tbody>
</Table>
<Group justify="center">
{hasNextPage ? <Button onClick={() => fetchNextPage()} variant="default">Loading more...</Button> : null}
</Group>

</Container>

)
Expand Down

0 comments on commit 0ce2e6e

Please sign in to comment.