kubetest is an open-source test suite for exercising various aspects of a Kubernetes cluster. One of the areas of interest in a scaled environment is cluster performance.
Configuring and running kubetest is beyond the scope of this blog; what I intend to share is how you can plot the results of these tests.
API Responsiveness:
import json

import matplotlib.pyplot as plt
import pandas as pd

# Load the dataItems list from the kubetest API responsiveness output.
with open('PM1_APIResponsiveness_load_2019-06-19T15:11:48Z.json') as f:
    data = json.load(f)['dataItems']

# One row per API call: a "Verb-Scope-Resource" label plus the P50/P90/P99
# latencies reported by kubetest.
rows = []
for item in data:
    labels = item['labels']
    rows.append([
        labels['Verb'] + '-' + labels['Scope'] + '-' + labels['Resource'],
        item['data']['Perc50'],
        item['data']['Perc90'],
        item['data']['Perc99'],
    ])

df = pd.DataFrame(rows, columns=['API', 'LatencyP50', 'LatencyP90', 'LatencyP99'])

# Grouped bar chart: one cluster of P50/P90/P99 bars per API.
ax = df.plot.bar(x='API')
ax.set_xlabel("APIs")
ax.set_ylabel("Latency (ms)")
plt.xticks(fontsize=5, rotation=30, ha='right')
plt.tight_layout()
plt.show()
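Beyond eyeballing the chart, the same DataFrame gives you a quick pass/fail view. The upstream Kubernetes scalability SLO targets a 99th-percentile latency under 1 s for (non-streaming) API calls; treat the 1000 ms threshold below as my assumption of that target, reusing the df built above:

# Assumed threshold: P99 API latency under 1 s, per the upstream SLO.
SLO_MS = 1000

# List the Verb-Scope-Resource combinations that exceed it, worst first.
violations = df[df['LatencyP99'] > SLO_MS]
print(violations.sort_values('LatencyP99', ascending=False)[['API', 'LatencyP99']])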
cradhakrish-a01:chirag cradhakrishnan$ cat PM1_APIResponsiveness_load_2019-06-19T15\:11\:48Z.json
{
"version": "v1",
"dataItems": [
{
"data": {
"Perc50": 5.484,
"Perc90": 25.443,
"Perc99": 114.011
},
"unit": "ms",
"labels": {
"Count": "2358",
"Resource": "pods",
"Scope": "namespace",
"Subresource": "",
"Verb": "DELETE"
}
},
{
"data": {
"Perc50": 32.052,
"Perc90": 51.073,
"Perc99": 74.26
},
"unit": "ms",
"labels": {
"Count": "68",
"Resource": "services",
"Scope": "namespace",
"Subresource": "",
"Verb": "DELETE"
}
},
.
.
.
{
"data": {
"Perc50": 0.115,
"Perc90": 0.178,
"Perc99": 0.273
},
"unit": "ms",
"labels": {
"Count": "38",
"Resource": "secrets",
"Scope": "namespace",
"Subresource": "",
"Verb": "LIST"
}
}
]
}
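Given how regular this structure is, pandas can also flatten the nested records in one call instead of the manual loop; a minimal sketch, assuming pandas >= 1.0 where json_normalize is exposed at the top level:

import json

import pandas as pd

with open('PM1_APIResponsiveness_load_2019-06-19T15:11:48Z.json') as f:
    items = json.load(f)['dataItems']

# Nested keys become dotted column names: data.Perc50, labels.Verb, etc.
df = pd.json_normalize(items)
df['API'] = df['labels.Verb'] + '-' + df['labels.Scope'] + '-' + df['labels.Resource']
print(df[['API', 'data.Perc50', 'data.Perc90', 'data.Perc99']].head())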
Pod Responsiveness:
import json

import matplotlib.pyplot as plt
import pandas as pd

# Load the dataItems list from the kubetest pod startup latency output.
with open('PM1_PodStartupLatency_density_2019-06-20T10:32:57Z.json') as f:
    data = json.load(f)['dataItems']

# One row per startup phase (create_to_schedule, schedule_to_run, ...) with
# the P50/P90/P99/P100 latencies reported by kubetest.
rows = []
for item in data:
    rows.append([
        item['labels']['Metric'],
        item['data']['Perc50'],
        item['data']['Perc90'],
        item['data']['Perc99'],
        item['data']['Perc100'],
    ])

df = pd.DataFrame(rows, columns=['Metric', 'LatencyP50', 'LatencyP90', 'LatencyP99', 'LatencyP100'])

# Grouped bar chart: one cluster of percentile bars per startup phase.
ax = df.plot.bar(x='Metric')
ax.set_xlabel("Metric")
ax.set_ylabel("Latency (ms)")
plt.xticks(fontsize=5, rotation=30, ha='right')
plt.tight_layout()
plt.show()
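If you are running this on a remote box, or want to embed the chart in a post like this one, swap the interactive window for a file export; the filename below is arbitrary:

# Replace plt.show() with a file export.
plt.savefig('pod_startup_latency.png', dpi=200)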
cradhakrish-a01:chirag cradhakrishnan$ cat PM1_PodStartupLatency_density_2019-06-20T10:32:57Z.json
{
"version": "v1",
"dataItems": [
{
"data": {
"Perc100": 0,
"Perc50": 0,
"Perc90": 0,
"Perc99": 0
},
"unit": "ms",
"labels": {
"Metric": "create_to_schedule"
}
},
{
"data": {
"Perc100": 28000,
"Perc50": 9000,
"Perc90": 14000,
"Perc99": 26000
},
"unit": "ms",
"labels": {
"Metric": "schedule_to_run"
}
},
{
"data": {
"Perc100": -5327.442072,
"Perc50": -6894.455178,
"Perc90": -6155.301798,
"Perc99": -5537.035922
},
"unit": "ms",
"labels": {
"Metric": "run_to_watch"
}
},
{
"data": {
"Perc100": 20665.27986,
"Perc50": 1753.811386,
"Perc90": 6665.07306,
"Perc99": 18600.530946
},
"unit": "ms",
"labels": {
"Metric": "schedule_to_watch"
}
},
{
"data": {
"Perc100": 20665.27986,
"Perc50": 1753.811386,
"Perc90": 6665.07306,
"Perc99": 18600.530946
},
"unit": "ms",
"labels": {
"Metric": "pod_startup"
}
}
]
}
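As a final sanity check, the pod_startup entry can be compared against the commonly cited upstream target of a 99th-percentile pod startup latency under 5 s; the 5000 ms threshold below is my assumption of that target:

import json

# Assumed threshold: P99 pod startup latency under 5 s.
SLO_MS = 5000

with open('PM1_PodStartupLatency_density_2019-06-20T10:32:57Z.json') as f:
    items = json.load(f)['dataItems']

for item in items:
    if item['labels']['Metric'] == 'pod_startup':
        p99 = item['data']['Perc99']
        status = 'OK' if p99 <= SLO_MS else 'VIOLATION'
        print(f"pod_startup P99 = {p99:.1f} ms -> {status}")

With the sample above, the roughly 18.6 s P99 would be flagged as a violation.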