plots_llvm_paper_likwid.py
#!/usr/bin/env python

import numpy as np
from tkinter import Tcl
import glob, os
import re
from matplotlib import gridspec
import matplotlib.pyplot as plt
import matplotlib as mpl
import pandas as pd


"""
Script to read the likwid output data and make plots.
Read likwid CSV files using Pandas.
"""

# Input
folder_input = "/home/cristianbarrahinojosa/Overleaf/paper-llvm2022/experiments/18_sph_simulation/hamilton8/Noh_2D/output_np_400_et_0.01/likwid/"

datatype_inputs = ["MEM", "CACHE"]

for datatype_input in datatype_inputs:
    filename_template = "output_" + datatype_input + "_cores_*_*.cvs"
    filename_template_regex = "output_" + datatype_input + "_cores_(.*)_(.*)"

    cores_array = [1, 2, 4, 8, 16, 20, 24, 28, 32, 36, 40, 44, 48, 52, 56, 60, 64]

    # Initialize template dataframe
    if datatype_input == "MEM":
        data_template_SUM = {
            "Metric SUM": [
                "Runtime (RDTSC) [s] STAT",
                "Runtime unhalted [s] STAT",
                "Clock [MHz] STAT",
                "CPI STAT",
                "Memory bandwidth [MBytes/s] STAT",
                "Memory data volume [GBytes] STAT",
            ],
        }
        data_template_AVG = {
            "Metric AVG": [
                "Runtime (RDTSC) [s] STAT",
                "Runtime unhalted [s] STAT",
                "Clock [MHz] STAT",
                "CPI STAT",
                "Memory bandwidth [MBytes/s] STAT",
                "Memory data volume [GBytes] STAT",
            ],
        }
    elif datatype_input == "CACHE":
        data_template_SUM = {
            "Metric SUM": [
                "CPI STAT",
                "data cache requests STAT",
                "data cache request rate STAT",
                "data cache misses STAT",
                "data cache miss rate STAT",
                "data cache miss ratio STAT",
            ],
        }
        data_template_AVG = {
            "Metric AVG": [
                "CPI STAT",
                "data cache requests STAT",
                "data cache request rate STAT",
                "data cache misses STAT",
                "data cache miss rate STAT",
                "data cache miss ratio STAT",
            ],
        }
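    # Note: the metric names above only label the rows of the output tables;
    # the values read further down are attached purely by position, so the
    # order here is assumed to match the order of the last N_metrics rows in
    # the likwid CSV files.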

    # Read CSV data and transform into numpy arrays
    data_SUM_on = pd.DataFrame(data_template_SUM)
    data_SUM_off = pd.DataFrame(data_template_SUM)
    data_AVG_on = pd.DataFrame(data_template_AVG)
    data_AVG_off = pd.DataFrame(data_template_AVG)

    # The relevant data is at the bottom of the file, so let's skip some rows
    N_metrics = len(data_SUM_on)
    offset_row = -N_metrics

    # List all files matching the template
    path = glob.glob(folder_input + filename_template)
    path = Tcl().call("lsort", "-dict", path)
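    # Tcl's "lsort -dict" performs a dictionary (natural) sort, so embedded
    # numbers are compared numerically and e.g. cores_2 sorts before cores_16,
    # which a plain lexicographic sort of the glob result would not guarantee;
    # presumably that is why tkinter's Tcl is pulled in here.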

    assert path, "No matching file found in path. Check input data."

    for file in path:
        absfilename = file
        tempname = os.path.basename(absfilename)
        filename = os.path.splitext(tempname)[0]

        # Get the core count and the on/off flag via regex
        pf = re.compile(filename_template_regex)

        match = pf.search(filename)
        cores = match.group(1)
        clang = match.group(2)

        # Read CSV file and trim
        df_input = pd.read_csv(file, index_col=0, header=0, on_bad_lines="skip")

        # Column with SUM STAT
        SUM = df_input["Info"].iloc[offset_row:]

        # Column with Avg STAT
        if cores != "1":
            AVG = df_input["Unnamed: 4"].iloc[offset_row:]
        else:
            AVG = SUM  # no difference for 1 core
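        # The column labels used above ("Info" for the Sum column and
        # "Unnamed: 4" for the Avg column) are simply what pandas assigns to
        # the STAT table header in these particular likwid CSV files; another
        # likwid version or group may lay this table out differently.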

        if clang == "on":
            data_SUM_on["cores " + str(cores)] = SUM.values
            data_AVG_on["cores " + str(cores)] = AVG.values
        elif clang == "off":
            data_SUM_off["cores " + str(cores)] = SUM.values
            data_AVG_off["cores " + str(cores)] = AVG.values

    # Convert results into NumPy arrays
    SUM_on = np.zeros((N_metrics, len(cores_array)))
    AVG_on = np.zeros((N_metrics, len(cores_array)))

    SUM_off = np.zeros((N_metrics, len(cores_array)))
    AVG_off = np.zeros((N_metrics, len(cores_array)))

    for i, cores in enumerate(cores_array):
        for metric in range(N_metrics):
            SUM_on[metric, i] = float(data_SUM_on["cores " + str(cores)][metric])
            AVG_on[metric, i] = float(data_AVG_on["cores " + str(cores)][metric])

            SUM_off[metric, i] = float(data_SUM_off["cores " + str(cores)][metric])
            AVG_off[metric, i] = float(data_AVG_off["cores " + str(cores)][metric])

    # Store data for plotting
    if datatype_input == "MEM":
        N_dt = 1e2  # number of time steps; converts total runtime to time per step
        runtime_on = AVG_on[0] / N_dt
        runtime_off = AVG_off[0] / N_dt

        clock_speed_on = AVG_on[2]
        clock_speed_off = AVG_off[2]

        data_volume_on = AVG_on[-1]
        data_volume_off = AVG_off[-1]

    elif datatype_input == "CACHE":
        cache_req_on = AVG_on[1] / 1e12
        cache_req_off = AVG_off[1] / 1e12

        cache_miss_ratio_on = AVG_on[5]
        cache_miss_ratio_off = AVG_off[5]

    print(f"Done reading CSV data for datatype: {datatype_input}.")


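# The plotting below relies on both passes of the loop above having run:
# runtime_on/off and data_volume_on/off come from the MEM files, while
# cache_req_on/off and cache_miss_ratio_on/off come from the CACHE files.
# The figure is a 2x2 grid: time per time step, memory data volume,
# L2 cache requests, and L2 cache miss rate.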
# Plot Setup
font = {"size": 12}
plt.rc("font", **font)
plt.rc("font", family="serif")
plt.rc("text", usetex=False)

fig = plt.figure(figsize=(10, 8))
gs = gridspec.GridSpec(2, 2, wspace=0.30, hspace=0.25)

ax0 = plt.subplot(gs[0])

# Runtime plot
plt.scatter(
    cores_array,
    runtime_on,
    marker="o",
    edgecolors="r",
    facecolors="r",
    s=25,
    zorder=10,
    label=r"on",
)
plt.plot(cores_array, runtime_on, c="r", lw=1.2)
plt.scatter(cores_array, runtime_off, marker="^", c="b", s=25, zorder=10, label=r"off")
plt.plot(cores_array, runtime_off, c="b", lw=1.2, zorder=11)

plt.xlabel(r"Cores")
plt.ylabel(r"Time per time step [s]")
plt.xscale("log", base=2)
# plt.yscale( "log", base=2 )
plt.margins(0.05, 0.1)
plt.tick_params(direction="in", top=True, right=True)
plt.gca().xaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
plt.gca().yaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
plt.xticks([1, 2, 4, 8, 16, 32, 64])
# plt.yticks([1,2,4,8,16,32,64])

leg = plt.legend(loc="upper right", fontsize=10)

# Memory data volume
ax1 = plt.subplot(gs[1])
plt.scatter(
    cores_array,
    data_volume_on,
    marker="o",
    edgecolors="r",
    facecolors="r",
    s=25,
    zorder=10,
    label=r"on",
)
plt.plot(cores_array, data_volume_on, c="r", lw=1.2, zorder=-1)
plt.scatter(
    cores_array, data_volume_off, marker="^", c="b", s=25, zorder=10, label=r"off"
)
plt.plot(cores_array, data_volume_off, c="b", lw=1.2, zorder=11)
plt.xlabel(r"Cores")
plt.ylabel(r"Memory data volume [GBytes]")
plt.margins(0.05, 0.1)
plt.xscale("log", base=2)
# plt.yscale( "log", base=10)
plt.gca().xaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
# plt.gca().yaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
plt.xticks([1, 2, 4, 8, 16, 32, 64])
plt.tick_params(direction="in", top=True, right=True)


# Cache requests
ax2 = plt.subplot(gs[2])
plt.scatter(
    cores_array,
    cache_req_on,
    marker="o",
    edgecolors="r",
    facecolors="r",
    s=25,
    zorder=10,
    label=r"on",
)
plt.plot(cores_array, cache_req_on, c="r", lw=1.2, zorder=-1)
plt.scatter(
    cores_array, cache_req_off, marker="^", c="b", s=25, zorder=10, label=r"off"
)
plt.plot(cores_array, cache_req_off, c="b", lw=1.2, zorder=11)
plt.xlabel(r"Cores")
plt.ylabel(r"L2 cache requests [$\times10^{12}$]")
plt.margins(0.05, 0.1)
plt.xscale("log", base=2)
# plt.yscale( "log", base=10)
plt.gca().xaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
# plt.gca().yaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
plt.xticks([1, 2, 4, 8, 16, 32, 64])
plt.tick_params(direction="in", top=True, right=True)

# Cache miss rate
ax3 = plt.subplot(gs[3])
plt.scatter(
    cores_array,
    100 * cache_miss_ratio_on,
    marker="o",
    edgecolors="r",
    facecolors="r",
    s=25,
    zorder=10,
    label=r"on",
)
plt.plot(cores_array, 100 * cache_miss_ratio_on, c="r", lw=1.2)
plt.scatter(
    cores_array,
    100 * cache_miss_ratio_off,
    marker="^",
    c="b",
    s=25,
    zorder=10,
    label=r"off",
)
plt.plot(cores_array, 100 * cache_miss_ratio_off, c="b", lw=1.2)
plt.xlabel(r"Cores")
plt.ylabel(r"L2 cache miss rate [%]")
plt.margins(0.05, 0.1)
plt.xscale("log", base=2)
plt.gca().xaxis.set_major_formatter(mpl.ticker.ScalarFormatter())
plt.xticks([1, 2, 4, 8, 16, 32, 64])
plt.tick_params(direction="in", top=True, right=True)

plt.show()
fig.savefig("plot_likwid_np_400.pdf", bbox_inches="tight")
plt.close()
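# Usage sketch (assuming the likwid CSV files described at the top are in
# place): point folder_input at your own likwid output directory and run the
# script directly, e.g.
#   python3 plots_llvm_paper_likwid.py
# The figure is shown interactively and written to plot_likwid_np_400.pdf in
# the current working directory.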