For #37: adding NFS PV HDD logs and all data
subash-taranga authored Jul 29, 2024
1 parent 7d40369 commit cef2c0f
Showing 17 changed files with 921 additions and 0 deletions.
161 changes: 161 additions & 0 deletions logs/nfs-pv-hdd/cost-logs/My Billing Account_Reports, 2024-07-16.csv

Large diffs are not rendered by default.

356 changes: 356 additions & 0 deletions logs/nfs-pv-hdd/cost-logs/My Billing Account_Reports, 2024-07-21.csv

Large diffs are not rendered by default.

@@ -0,0 +1,14 @@
tar: Removing leading `/' from member names
44.0 B 0:00:19 [2.27 B/s] [ <=> ]
tar: Removing leading `/' from member names
44.0 B 0:00:30 [1.45 B/s] [ <=> ]
tar: Removing leading `/' from member names
44.0 B 0:00:48 [ 937miB/s] [ <=> ]
tar: Removing leading `/' from member names
44.0 B 0:01:08 [ 661miB/s] [ <=> ]
tar: Removing leading `/' from member names
44.0 B 0:01:40 [ 448miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:03:14 [ 231miB/s] [ <=> ]
tar: Removing leading `/' from member names
78.0 B 0:04:51 [ 274miB/s] [ <=>
@@ -0,0 +1,14 @@
tar: Removing leading `/' from member names
78.0 B 0:00:27 [2.88 B/s] [ <=> ]
tar: Removing leading `/' from member names
78.0 B 0:00:36 [2.11 B/s] [ <=> ]
tar: Removing leading `/' from member names
78.0 B 0:01:04 [1.20 B/s] [ <=> ]
tar: Removing leading `/' from member names
44.0 B 0:01:20 [ 560miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:01:53 [ 395miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:03:56 [ 190miB/s] [ <=> ]
tar: Removing leading `/' from member names
78.0 B 0:05:21 [ 248miB/s]
@@ -0,0 +1,14 @@
tar: Removing leading `/' from member names ]
44.0 B 0:00:38 [1.14 B/s] [ <=> ]
tar: Removing leading `/' from member names ]
78.0 B 0:01:03 [1.24 B/s] [ <=> ]
tar: Removing leading `/' from member names ]
78.0 B 0:01:33 [ 850miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:02:27 [ 305miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
78.0 B 0:02:56 [ 452miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:05:09 [ 145miB/s] [ <=> ]
tar: Removing leading `/' from member names ]
44.0 B 0:08:06 [92.6miB/s] [ <=>
56 changes: 56 additions & 0 deletions logs/nfs-pv-hdd/python/Compare Bucket SSD HDD.txt
@@ -0,0 +1,56 @@
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd

# Finland SSD data
finland_ssd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [18, 26, 47, 69, 89, 190, 278],
    'Type': ['Finland SSD'] * 7
}

# Milan SSD data
milan_ssd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [31, 44, 77, 87, 134, 244, 404],
    'Type': ['Milan SSD'] * 7
}

# Finland HDD data
finland_hdd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [19, 30, 48, 68, 100, 194, 291],
    'Type': ['Finland HDD'] * 7
}

# Milan HDD data
milan_hdd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [27, 36, 64, 80, 113, 236, 321],
    'Type': ['Milan HDD'] * 7
}

# Bucket download data
bucket_download_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 1536, 2048, 2560, 5120, 7680, 10240],
    'Download Time (s)': [20.219, 32.020, 49.162, 67.144, 97.556, 144.417, 193.086, 238.967, 477.123, 718.011, 955.075],
    'Type': ['Bucket Download'] * 11
}

# Combine data into a single DataFrame using concat and reset the index to avoid duplicates
finland_ssd_df = pd.DataFrame(finland_ssd_data)
milan_ssd_df = pd.DataFrame(milan_ssd_data)
finland_hdd_df = pd.DataFrame(finland_hdd_data)
milan_hdd_df = pd.DataFrame(milan_hdd_data)
bucket_download_df = pd.DataFrame(bucket_download_data)

data = pd.concat([finland_ssd_df, milan_ssd_df, finland_hdd_df, milan_hdd_df, bucket_download_df]).reset_index(drop=True)

# Create the seaborn comparison plot
plt.figure(figsize=(12, 8))
sns.lineplot(data=data, x='Download Time (s)', y='File Size (MB)', hue='Type', marker='o')
plt.title('Download Time vs File Size')
plt.xlabel('Download Time (s)')
plt.ylabel('File Size (MB)')
plt.grid(True)
plt.show()
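
Note: the plotting files in this commit are stored as .txt but hold plain Python. A minimal sketch of reproducing a chart locally (the pip/python invocation is an assumption, not part of this commit; it only needs pandas, seaborn, and matplotlib installed):

pip install pandas seaborn matplotlib
# python runs the script regardless of the .txt extension; quotes handle the space in the filename
python "logs/nfs-pv-hdd/python/Compare Bucket SSD HDD.txt"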
25 changes: 25 additions & 0 deletions logs/nfs-pv-hdd/python/Europe north1-Finland.txt
@@ -0,0 +1,25 @@
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# Prepare the data
data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],  # Size in MB for the respective files
    'Download Time (s)': [19, 30, 48, 68, 100, 194, 291]  # Download time in seconds
}

# Create a DataFrame
df = pd.DataFrame(data)

# Create the plot
sns.set(style="whitegrid")
plt.figure(figsize=(10, 6))
sns.lineplot(data=df, y='File Size (MB)', x='Download Time (s)', marker='o')

# Add title and labels
plt.title('Download Time vs File Size Finland NFS PV HDD')
plt.ylabel('File Size (MB)')
plt.xlabel('Download Time (s)')

# Show the plot
plt.show()
25 changes: 25 additions & 0 deletions logs/nfs-pv-hdd/python/Europe west8-Milan.txt
@@ -0,0 +1,25 @@
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# Prepare the data
data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],  # Size in MB for the respective files
    'Download Time (s)': [27, 36, 64, 80, 113, 236, 321]  # Download time in seconds
}

# Create a DataFrame
df = pd.DataFrame(data)

# Create the plot
sns.set(style="whitegrid")
plt.figure(figsize=(10, 6))
sns.lineplot(data=df, y='File Size (MB)', x='Download Time (s)', marker='o')

# Add title and labels
plt.title('Download Time vs File Size Milan NFS PV HDD')
plt.ylabel('File Size (MB)')
plt.xlabel('Download Time (s)')

# Show the plot
plt.show()
25 changes: 25 additions & 0 deletions logs/nfs-pv-hdd/python/Us west4-Las-Vegas.txt
@@ -0,0 +1,25 @@
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt

# Prepare the data
data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],  # Size in MB for the respective files
    'Download Time (s)': [38, 63, 93, 147, 176, 309, 486]  # Download time in seconds
}

# Create a DataFrame
df = pd.DataFrame(data)

# Create the plot
sns.set(style="whitegrid")
plt.figure(figsize=(10, 6))
sns.lineplot(data=df, y='File Size (MB)', x='Download Time (s)', marker='o')

# Add title and labels
plt.title('Download Time vs File Size LAS VEGAS NFS PV HDD')
plt.ylabel('File Size (MB)')
plt.xlabel('Download Time (s)')

# Show the plot
plt.show()
41 changes: 41 additions & 0 deletions logs/nfs-pv-hdd/python/compare all.txt
@@ -0,0 +1,41 @@
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd

# Finland NFS PV HDD data
finland_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [19, 30, 48, 68, 100, 194, 291],
    'Location': ['Finland'] * 7
}

# Milan NFS PV HDD data
milan_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [27, 36, 64, 80, 113, 236, 321],
    'Location': ['Milan'] * 7
}

# Las Vegas NFS PV HDD data
las_vegas_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [38, 63, 93, 147, 176, 309, 486],
    'Location': ['Las Vegas'] * 7
}

# Combine data into a single DataFrame using concat and reset the index to avoid duplicates
finland_df = pd.DataFrame(finland_data)
milan_df = pd.DataFrame(milan_data)
las_vegas_df = pd.DataFrame(las_vegas_data)
data = pd.concat([finland_df, milan_df, las_vegas_df]).reset_index(drop=True)

# Create seaborn plot with adjusted axis limits and a clearer display
plt.figure(figsize=(10, 6))
sns.lineplot(data=data, y='File Size (MB)', x='Download Time (s)', hue='Location', marker='o')
plt.title('Download Time vs File Size')
plt.ylabel('File Size (MB)')
plt.xlabel('Download Time (s)')
plt.ylim(0, 3200)
plt.xlim(0, 500)
plt.grid(True)
plt.show()
48 changes: 48 additions & 0 deletions logs/nfs-pv-hdd/python/compare ssd with hdd in europe.txt
@@ -0,0 +1,48 @@
import seaborn as sns
import matplotlib.pyplot as plt
import pandas as pd

# Finland SSD data
finland_ssd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [18, 26, 47, 69, 89, 190, 278],
    'Type': ['Finland SSD'] * 7
}

# Milan SSD data
milan_ssd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [31, 44, 77, 87, 134, 244, 404],
    'Type': ['Milan SSD'] * 7
}

# Finland HDD data
finland_hdd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [19, 30, 48, 68, 100, 194, 291],
    'Type': ['Finland HDD'] * 7
}

# Milan HDD data
milan_hdd_data = {
    'File Size (MB)': [200, 300, 512, 700, 1024, 2048, 3072],
    'Download Time (s)': [27, 36, 64, 80, 113, 236, 321],
    'Type': ['Milan HDD'] * 7
}

# Combine data into a single DataFrame using concat and reset the index to avoid duplicates
finland_ssd_df = pd.DataFrame(finland_ssd_data)
milan_ssd_df = pd.DataFrame(milan_ssd_data)
finland_hdd_df = pd.DataFrame(finland_hdd_data)
milan_hdd_df = pd.DataFrame(milan_hdd_data)

data = pd.concat([finland_ssd_df, milan_ssd_df, finland_hdd_df, milan_hdd_df]).reset_index(drop=True)

# Create the seaborn comparison plot
plt.figure(figsize=(10, 6))
sns.lineplot(data=data, x='Download Time (s)', y='File Size (MB)', hue='Type', marker='o')
plt.title('Download Time vs File Size')
plt.xlabel('Download Time (s)')
plt.ylabel('File Size (MB)')
plt.grid(True)
plt.show()
10 changes: 10 additions & 0 deletions logs/nfs-pv-hdd/script/nfs_download_script.sh
@@ -0,0 +1,10 @@
#!/bin/bash
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/200.txt ./200.txt | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/300.txt ./300.txt | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/512.txt ./512.txt | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/700.txt ./700.txt | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/1GB.zip ./1GB.zip | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/2GB.zip ./2GB.zip | pv
kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/3GB.zip ./3GB.zip | pv
#kubectl cp nfs-server-7c569b7755-v2kqb:/tmp/10GB.zip ./10GB.zip | pv
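
Note: kubectl cp writes the file straight to the destination path, so the pv attached to its stdout never sees the payload — the log files earlier in this commit show pv reporting only 44–78 B over several minutes even for multi-GB copies. A minimal alternative sketch (an assumption, not part of this commit) that streams the bytes through pv so the rate and elapsed time reflect the real transfer:

#!/bin/bash
# Stream one test file out of the pod through pv; pv now measures the actual payload
kubectl exec nfs-server-7c569b7755-v2kqb -- cat /tmp/1GB.zip | pv > ./1GB.zip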

12 changes: 12 additions & 0 deletions logs/nfs-pv-hdd/script/nfs_upload_script.sh
@@ -0,0 +1,12 @@
#!/bin/bash
tar cf - 200.txt | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 300.txt | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 512.txt | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 700.txt | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 1GB.zip | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 2GB.zip | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
tar cf - 3GB.zip | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
#tar cf - 10GB.zip | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
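
The per-file transfer times used in the Python plots appear to have been recorded by hand; a hedged sketch of a wrapper that captures elapsed seconds per upload into a small CSV (the loop, file list, and upload_times.csv name are assumptions, not part of this commit):

#!/bin/bash
# Upload each test file and append "file,elapsed_seconds" to upload_times.csv
for f in 200.txt 300.txt 512.txt 700.txt 1GB.zip 2GB.zip 3GB.zip; do
  start=$(date +%s)
  tar cf - "$f" | pv | kubectl exec -i nfs-server-7c569b7755-v2kqb -- tar xf - -C /tmp/
  echo "$f,$(( $(date +%s) - start ))" >> upload_times.csv
done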



18 changes: 18 additions & 0 deletions logs/nfs-pv-hdd/terraform/disk.tf
@@ -0,0 +1,18 @@
provider "google" {
  project = "gcs-bucket-subash"
  region  = "europe-north1"
}

resource "google_compute_disk" "storage-nfs" {
  name = "storage-nfs"
  zone = "europe-north1-a"
  type = "pd-standard" # Specifies HDD type

  labels = {
    environment     = "nfs"
    goog-gke-volume = ""
  }

  size                      = 100 # Size in GB
  physical_block_size_bytes = 4096
}
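
A short usage sketch for the disk definition above (the gcloud check is an assumption, not part of this commit):

terraform init
terraform apply -auto-approve
# Confirm the disk was created as a pd-standard (HDD) volume in europe-north1-a
gcloud compute disks describe storage-nfs --zone=europe-north1-a --format='value(name,type,sizeGb)'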
16 changes: 16 additions & 0 deletions logs/nfs-pv-hdd/terraform/main.tf
@@ -0,0 +1,16 @@
resource "google_container_cluster" "primary" {
  name     = "nfs-hdd-cluster-finland"
  location = "europe-north1-a"

  node_pool {
    name               = "default-pool"
    initial_node_count = 1
    node_config {
      machine_type = "e2-medium" # Specify the appropriate machine type
      disk_size_gb = 100
      disk_type    = "pd-standard" # Specifies HDD type
    }
  }

  network = "default"
}
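
Once the cluster exists, kubectl needs credentials before the nfs.yaml below can be applied; a minimal sketch (assumes gcloud is already authenticated against the gcs-bucket-subash project):

gcloud container clusters get-credentials nfs-hdd-cluster-finland --zone europe-north1-a --project gcs-bucket-subash
kubectl get nodes -o wide   # verify the node pool is up before deploying the NFS server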
60 changes: 60 additions & 0 deletions logs/nfs-pv-hdd/yaml/nfs.yaml
@@ -0,0 +1,60 @@
# nfs.yaml
---
apiVersion: apps/v1
kind: Deployment
metadata:
  name: nfs-server
spec:
  replicas: 1
  selector:
    matchLabels:
      role: nfs-server
  template:
    metadata:
      labels:
        role: nfs-server
    spec:
      affinity:
        nodeAffinity:
          requiredDuringSchedulingIgnoredDuringExecution:
            nodeSelectorTerms:
              - matchExpressions:
                  - key: topology.kubernetes.io/zone
                    operator: In
                    values:
                      - europe-north1-a
      containers:
        - name: nfs-server
          image: gcr.io/google_containers/volume-nfs:0.8
          ports:
            - name: nfs
              containerPort: 2049
            - name: mountd
              containerPort: 20048
            - name: rpcbind
              containerPort: 111
          securityContext:
            privileged: true
          volumeMounts:
            - mountPath: /exports
              name: nfs-pvc
      volumes:
        - name: nfs-pvc
          gcePersistentDisk:
            pdName: storage-nfs
            fsType: ext4
---
apiVersion: v1
kind: Service
metadata:
  name: nfs-server
spec:
  ports:
    - name: nfs
      port: 2049
    - name: mountd
      port: 20048
    - name: rpcbind
      port: 111
  selector:
    role: nfs-server
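
A hedged deployment check for the manifest above (assumes the standard tooling inside the volume-nfs image; not part of this commit):

kubectl apply -f nfs.yaml
kubectl get deployment,service nfs-server
# Confirm the pd-standard disk is mounted at the export path inside the pod
kubectl exec deploy/nfs-server -- df -h /exports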