Skip to content

Commit 528ede1

Browse files
Merge branch 'linode:develop' into develop
2 parents a831ac3 + 1cc7151 commit 528ede1

6 files changed

Lines changed: 602 additions & 3 deletions

File tree

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@linode/manager": Upcoming Features
3+
---
4+
5+
Utility setup changes for CSV download for `CloudPulse metrics widget data` ([#13484](https://github.com/linode/manager/pull/13484))
Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
---
2+
"@linode/manager": Fixed
3+
---
4+
5+
Destination Form: the Endpoint field is not clearing validation error when a Bucket is selected ([#13494](https://github.com/linode/manager/pull/13494))
Lines changed: 296 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,296 @@
import { describe, expect, it, vi } from 'vitest';

import { FILTER_CONFIG } from '../../Utils/FilterConfig';
import { generateCSVData } from './CloudPulseWidgetCSVUtils';

import type { CloudPulseServiceTypeFilterMap } from '../../Utils/models';
import type { CSVDataProps } from './CloudPulseWidgetCSVUtils';
9+
const DASHBOARD_NAME = 'Test Dashboard';
10+
const START_TIME_LABEL = 'Start Time';
11+
const DATA_INTERVAL_LABEL = 'Data Aggregation Interval';
12+
const DIMENSION_FILTERS_LABEL = 'Dimension Filters';
13+
14+
const baseProps: CSVDataProps = {
15+
dashboardName: DASHBOARD_NAME,
16+
data: [
17+
{ timestamp: 1718000000000, value: 42, value2: 100 },
18+
{ timestamp: 1718003600000, value: 43, value2: 110 },
19+
],
20+
dimensionFilters: [
21+
{
22+
dimension_label: 'test',
23+
operator: 'eq',
24+
value: 'A',
25+
},
26+
],
27+
dimensionOptions: [
28+
{ dimension_label: 'test', label: 'Test', values: ['A', 'B'] },
29+
],
30+
duration: {
31+
start: '2024-06-10T00:00:00Z',
32+
end: '2024-06-10T01:00:00Z',
33+
timeZone: 'UTC',
34+
preset: 'Reset',
35+
},
36+
filterConfig:
37+
FILTER_CONFIG.get(1) ??
38+
vi.mockObject<CloudPulseServiceTypeFilterMap>({
39+
capability: 'Managed Databases',
40+
filters: [],
41+
serviceType: 'dbaas',
42+
}),
43+
filters: {
44+
id: {
45+
test: 'A',
46+
},
47+
label: {
48+
test: ['A', 'B'],
49+
},
50+
},
51+
groupBy: ['region'],
52+
isDataLoading: false,
53+
serviceType: 'dbaas',
54+
widget: {
55+
label: 'CPU Usage',
56+
unit: '%',
57+
aggregate_function: 'avg',
58+
time_granularity: { value: 5, unit: 'minute' },
59+
chart_type: 'line',
60+
color: '#000000',
61+
entity_ids: [],
62+
filters: [],
63+
metric: 'cpu_usage',
64+
service_type: 'dbaas',
65+
namespace_id: 1,
66+
region_id: 1,
67+
serviceType: 'dbaas',
68+
size: 12,
69+
time_duration: { value: 1, unit: 'hour' },
70+
y_label: 'cpu_usage',
71+
},
72+
};
73+
74+
describe('generateCSVData', () => {
75+
it('should generate CSV with all sections', () => {
76+
const csv = generateCSVData(baseProps);
77+
78+
expect(csv[0]).toEqual(['Dashboard', 'Test Dashboard']);
79+
expect(csv.some((row) => row[0] === 'Group By')).toBe(true);
80+
expect(csv.some((row) => row[0] === 'Aggregation Function')).toBe(true);
81+
expect(csv.some((row) => row[0] === DATA_INTERVAL_LABEL)).toBe(true);
82+
expect(csv.some((row) => row[0] === 'Metric')).toBe(true);
83+
expect(csv.some((row) => row[0] === 'Unit')).toBe(true);
84+
expect(
85+
csv.some((row) => Array.isArray(row) && row.includes('time (UTC)'))
86+
).toBe(true);
87+
expect(csv.some((row) => Array.isArray(row) && row.includes(100))).toBe(
88+
true
89+
);
90+
});
91+
92+
it('should handle empty data', () => {
93+
const csv = generateCSVData({ ...baseProps, data: [] });
94+
expect(
95+
csv.some((row) => Array.isArray(row) && row.includes('time (UTC)'))
96+
).toBe(false);
97+
});
98+
99+
it('should handle no groupBy', () => {
100+
const csv = generateCSVData({ ...baseProps, groupBy: [] });
101+
expect(csv.some((row) => row[0] === 'Group By')).toBe(false);
102+
});
103+
104+
it('should handle no aggregation function', () => {
105+
const csv = generateCSVData({
106+
...baseProps,
107+
widget: { ...baseProps.widget, aggregate_function: '' },
108+
});
109+
expect(csv.some((row) => row[0] === 'Aggregation Function')).toBe(false);
110+
});
111+
112+
it('should include dimension filters', () => {
113+
const csv = generateCSVData({
114+
...baseProps,
115+
dimensionFilters: [
116+
{
117+
dimension_label: 'test',
118+
operator: 'eq',
119+
value: 'A',
120+
},
121+
],
122+
});
123+
expect(csv.some((row) => row[0] === DIMENSION_FILTERS_LABEL)).toBe(true);
124+
expect(
125+
csv.some((row) =>
126+
row[1] ? row[1].toString().includes('Test,eq,A') : false
127+
)
128+
).toBe(true);
129+
});
130+
131+
it('should format timestamps using the correct timezone', () => {
132+
const csv = generateCSVData({
133+
...baseProps,
134+
duration: {
135+
...baseProps.duration,
136+
timeZone: 'America/New_York',
137+
},
138+
});
139+
expect(
140+
csv.some((row) => Array.isArray(row) && row.includes('time (EDT)'))
141+
).toBe(true);
142+
// The formatted timestamp should include the correct hour for New York and timezone abbreviation
143+
const dataRow = csv.find((row) => Array.isArray(row) && row.includes(42));
144+
expect(dataRow?.[0]).toMatch('Jun 10, 2024, 2:13 AM');
145+
});
146+
147+
it('should handle empty dimensionFilters', () => {
148+
const csv = generateCSVData({
149+
...baseProps,
150+
dimensionFilters: [],
151+
});
152+
expect(csv.some((row) => row[0] === DIMENSION_FILTERS_LABEL)).toBe(false);
153+
});
154+
155+
it('should handle missing filter values gracefully', () => {
156+
const csv = generateCSVData({
157+
...baseProps,
158+
filters: {
159+
id: {},
160+
label: {},
161+
},
162+
});
163+
expect(csv.some((row) => row[0] === 'Region')).toBe(false);
164+
});
165+
166+
it('should filter data based on zoom range when zoomed', () => {
167+
const csv = generateCSVData({
168+
...baseProps,
169+
zoomRange: {
170+
left: 1718000000000, // First timestamp
171+
right: 1718000000000, // First timestamp only
172+
},
173+
});
174+
// Should only include the first data point
175+
const dataRows = csv.filter(
176+
(row) => Array.isArray(row) && typeof row[1] === 'number' && row[1] === 42
177+
);
178+
expect(dataRows.length).toBe(1);
179+
expect(dataRows[0]).toContain(42);
180+
});
181+
182+
it('should include all data when zoom range is dataMin/dataMax', () => {
183+
const csv = generateCSVData({
184+
...baseProps,
185+
zoomRange: {
186+
left: 'dataMin',
187+
right: 'dataMax',
188+
},
189+
});
190+
// Should include all data points
191+
expect(csv.some((row) => Array.isArray(row) && row.includes(42))).toBe(
192+
true
193+
);
194+
expect(csv.some((row) => Array.isArray(row) && row.includes(43))).toBe(
195+
true
196+
);
197+
});
198+
199+
it('should include all data when no zoom range is provided', () => {
200+
const csv = generateCSVData(baseProps);
201+
// Should include all data points
202+
expect(csv.some((row) => Array.isArray(row) && row.includes(42))).toBe(
203+
true
204+
);
205+
expect(csv.some((row) => Array.isArray(row) && row.includes(43))).toBe(
206+
true
207+
);
208+
});
209+
210+
it('should show preset name instead of start/end times for relative durations', () => {
211+
const csv = generateCSVData({
212+
...baseProps,
213+
duration: {
214+
...baseProps.duration,
215+
preset: 'Last 1 Hour',
216+
},
217+
});
218+
expect(
219+
csv.some((row) => row[0] === 'Time Range' && row[1] === 'Last 1 Hour')
220+
).toBe(true);
221+
expect(csv.some((row) => row[0] === START_TIME_LABEL)).toBe(false);
222+
expect(csv.some((row) => row[0] === 'End Time')).toBe(false);
223+
});
224+
225+
it('should show start/end times for custom/absolute time ranges', () => {
226+
const csv = generateCSVData({
227+
...baseProps,
228+
duration: {
229+
start: '2024-06-10T00:00:00Z',
230+
end: '2024-06-10T01:00:00Z',
231+
timeZone: 'UTC',
232+
preset: 'Reset',
233+
},
234+
});
235+
expect(csv.some((row) => row[0] === START_TIME_LABEL)).toBe(true);
236+
expect(csv.some((row) => row[0] === 'End Time')).toBe(true);
237+
expect(csv.some((row) => row[0] === 'Time Range')).toBe(false);
238+
});
239+
240+
it('should handle Auto time granularity correctly', () => {
241+
const csv = generateCSVData({
242+
...baseProps,
243+
widget: {
244+
...baseProps.widget,
245+
time_granularity: { value: -1, unit: 'Auto' },
246+
},
247+
});
248+
249+
const intervalRow = csv.find((row) => row[0] === DATA_INTERVAL_LABEL);
250+
expect(intervalRow?.[1]).toBe('Auto');
251+
});
252+
253+
it('should handle regular time granularity correctly', () => {
254+
const csv = generateCSVData({
255+
...baseProps,
256+
widget: {
257+
...baseProps.widget,
258+
time_granularity: { value: 30, unit: 'seconds' },
259+
},
260+
});
261+
262+
const intervalRow = csv.find(
263+
(row) => row[0] === 'Data Aggregation Interval'
264+
);
265+
expect(intervalRow?.[1]).toBe('30 seconds');
266+
});
267+
268+
it('should handle multiple dimension filters', () => {
269+
const csv = generateCSVData({
270+
...baseProps,
271+
dimensionFilters: [
272+
{ dimension_label: 'test', operator: 'eq', value: 'A' },
273+
{ dimension_label: 'test', operator: 'eq', value: 'B' },
274+
],
275+
dimensionOptions: [
276+
{ dimension_label: 'test', label: 'Test Label', values: ['A', 'B'] },
277+
],
278+
});
279+
280+
const filterRow = csv.find((row) => row[0] === DIMENSION_FILTERS_LABEL);
281+
expect(filterRow?.[1]).toContain('Test Label,eq,A;Test Label,eq,B');
282+
});
283+
284+
it('should include zoom range times when zoomed', () => {
285+
const csv = generateCSVData({
286+
...baseProps,
287+
zoomRange: {
288+
left: 1718000000000,
289+
right: 1718003600000,
290+
},
291+
});
292+
293+
expect(csv.some((row) => row[0] === 'Zoom Start Time')).toBe(true);
294+
expect(csv.some((row) => row[0] === 'Zoom End Time')).toBe(true);
295+
});
296+
});

0 commit comments

Comments
 (0)