// Copyright 2018 The Fuchsia Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

#include <fcntl.h>
#include <stdio.h>
#include <string.h>
#include <sys/stat.h>

#include <fbl/function.h>
#include <fbl/string.h>
#include <fbl/unique_fd.h>
#include <fbl/vector.h>
#include <fs-management/mount.h>
#include <fs-test-utils/fixture.h>
#include <fs-test-utils/perftest.h>
#include <unittest/unittest.h>

namespace fs_test_utils {
namespace {

// File used to dump the library's stdout. Allows verifying certain options.
constexpr char kFakeStdout[] = "/data/fake_stdout.txt";

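// Setting only result_path on a performance test should yield valid options.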
bool ResultSetIsValid() {
    BEGIN_TEST;
    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.result_path = "some/path";
    ASSERT_TRUE(p_options.IsValid(&err), err.c_str());
    END_TEST;
}

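// Setting only summary_path on a performance test should yield valid options.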
bool SummaryPathSetIsValid() {
    BEGIN_TEST;
    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.summary_path = "some/path";
    ASSERT_TRUE(p_options.IsValid(&err), err.c_str());
    END_TEST;
}

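// Enabling only print_statistics on a performance test should yield valid options.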
bool PrintStatisticsSetIsValid() {
    BEGIN_TEST;
    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.print_statistics = true;
    ASSERT_TRUE(p_options.IsValid(&err), err.c_str());
    END_TEST;
}

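// A performance test with no output (no result path, no summary path and no
// printed statistics) is not valid.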
bool NoOutputIsInvalid() {
    BEGIN_TEST;
    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.print_statistics = false;
    p_options.result_path.clear();
    p_options.summary_path.clear();

    ASSERT_FALSE(p_options.IsValid(&err), err.c_str());
    END_TEST;
}

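// ParseCommandLineArgs should return false and print the usage message when
// the provided options are invalid.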
bool InvalidOptionsReturnFalseAndPrintsUsage() {
    BEGIN_TEST;
    fbl::String err;
    char arg0[] = "/some/path/binary";
    char* argv[] = {arg0};
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.result_path = "some/path";
    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);
    f_options.block_device_path = "some_path";
    f_options.use_ramdisk = true;

    ASSERT_FALSE(f_options.IsValid(&err));

    FILE* fp = fopen(kFakeStdout, "w");
    ASSERT_TRUE(fp);
    ASSERT_FALSE(ParseCommandLineArgs(1, argv, &f_options, &p_options, fp));
    fclose(fp);

    // Usage is printed on error.
    struct stat st;
    stat(kFakeStdout, &st);
    remove(kFakeStdout);
    ASSERT_GT(st.st_size, 0);
    END_TEST;
}

// Sanity check that we print the usage message into the stream when the help
// option is provided.
bool HelpPrintsUsageMessage() {
    BEGIN_TEST;
    char arg0[] = "/some/path/binary";
    char arg1[] = "--help";
    char* argv[] = {arg0, arg1};
    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);

    FILE* fp = fopen(kFakeStdout, "w");
    ASSERT_TRUE(fp);
    ASSERT_FALSE(ParseCommandLineArgs(2, argv, &f_options, &p_options, fp));
    fclose(fp);

    struct stat st;
    stat(kFakeStdout, &st);
    remove(kFakeStdout);
    ASSERT_GT(st.st_size, 0);
    END_TEST;
}

// Verifies that ParseCommandLineArgs actually sets the respective fields in the
// option structs.
bool OptionsAreOverwritten() {
    BEGIN_TEST;
    fbl::Vector<fbl::String> argvs = {
        "/some/binary",
        "-p",
        "--use_fvm",
        "--fvm_slice_size",
        "8192",
        "--use_ramdisk",
        "--ramdisk_block_size",
        "1024",
        "--ramdisk_block_count",
        "500",
        "--runs",
        "4",
        "--out",
        "some_path",
        "--summary_path",
        "other_path",
        "--print_statistics",
        "--fs",
        "blobfs",
    };
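    // Build a null-terminated argv array pointing at the strings above.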
    const char* argv[argvs.size() + 1];
    for (size_t i = 0; i < argvs.size(); ++i) {
        argv[i] = argvs[i].data();
    }
    argv[argvs.size()] = nullptr;

    fbl::String err;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);

    FILE* fp = fopen(kFakeStdout, "w");
    ASSERT_TRUE(fp);
    ASSERT_TRUE(
        ParseCommandLineArgs(static_cast<int>(argvs.size()), argv, &f_options, &p_options, fp));
    fclose(fp);

    // Usage is not logged.
    struct stat st;
    stat(kFakeStdout, &st);
    remove(kFakeStdout);
    ASSERT_EQ(st.st_size, 0);

    // Parameters parsed.
    ASSERT_TRUE(f_options.block_device_path == "");
    ASSERT_TRUE(f_options.use_ramdisk);
    ASSERT_EQ(f_options.ramdisk_block_size, 1024);
    ASSERT_EQ(f_options.ramdisk_block_count, 500);
    ASSERT_TRUE(f_options.use_fvm);
    ASSERT_EQ(f_options.fvm_slice_size, 8192);
    ASSERT_EQ(f_options.fs_type, DISK_FORMAT_BLOBFS);

    ASSERT_FALSE(p_options.is_unittest);
    ASSERT_TRUE(p_options.result_path == "some_path");
    ASSERT_TRUE(p_options.summary_path == "other_path");
    ASSERT_TRUE(p_options.print_statistics);
    ASSERT_EQ(p_options.sample_count, 4);

    END_TEST;
}

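// Tests within a test case should run in the order in which they were registered.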
bool RunTestCasesPreservesOrder() {
    BEGIN_TEST;
    PerformanceTestOptions p_options = PerformanceTestOptions::UnitTest();
    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);
    p_options.sample_count = 1;
    fbl::Vector<int> calls;

    auto test_1 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_1");
        while (state->KeepRunning()) {
            calls.push_back(1);
        }
        return true;
    };
    auto test_2 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_2");
        while (state->KeepRunning()) {
            calls.push_back(2);
        }
        return true;
    };
    auto test_3 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_3");
        while (state->KeepRunning()) {
            calls.push_back(3);
        }
        return true;
    };

    TestCaseInfo info;
    info.name = "MyTestCase";
    info.tests.push_back({fbl::move(test_1), "test_1", /*required_disk_space=*/0});
    info.tests.push_back({fbl::move(test_2), "test_2", 0});
    info.tests.push_back({fbl::move(test_3), "test_3", 0});
    info.teardown = false;

    fbl::Vector<TestCaseInfo> test_cases;
    test_cases.push_back(fbl::move(info));
    ASSERT_TRUE(RunTestCases(f_options, p_options, test_cases, /*out=*/nullptr));

    // Verify order is preserved.
    ASSERT_EQ(calls.size(), 3);
    ASSERT_EQ(calls[0], 1);
    ASSERT_EQ(calls[1], 2);
    ASSERT_EQ(calls[2], 3);

    END_TEST;
}

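// Order should also be preserved when each test collects multiple samples.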
bool RunTestCasesPreservesOrderWithMultipleSamples() {
    BEGIN_TEST;
    PerformanceTestOptions p_options = PerformanceTestOptions::UnitTest();
    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);
    p_options.is_unittest = false;
    p_options.sample_count = 10;
    fbl::Vector<int> calls;

    auto test_1 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_1");
        while (state->KeepRunning()) {
            calls.push_back(1);
        }
        return true;
    };
    auto test_2 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_2");
        while (state->KeepRunning()) {
            calls.push_back(2);
        }
        return true;
    };
    auto test_3 = [&calls](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_3");
        while (state->KeepRunning()) {
            calls.push_back(3);
        }
        return true;
    };

    TestCaseInfo info;
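    // The test case's sample_count takes precedence over p_options.sample_count,
    // so each test below is expected to run 20 times.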
    info.sample_count = 20;
    info.name = "MyTestCase";
    info.tests.push_back({fbl::move(test_1), "test_1", /*required_disk_space=*/0});
    info.tests.push_back({fbl::move(test_2), "test_2", 0});
    info.tests.push_back({fbl::move(test_3), "test_3", 0});
    info.teardown = false;

    fbl::Vector<TestCaseInfo> test_cases;
    test_cases.push_back(fbl::move(info));
    ASSERT_TRUE(RunTestCases(f_options, p_options, test_cases, /*out=*/nullptr));

    // Verify order is preserved.
    ASSERT_EQ(calls.size(), 60);
    for (int i = 0; i < 20; ++i) {
        ASSERT_EQ(calls[i], 1);
        ASSERT_EQ(calls[(20 + i)], 2);
        ASSERT_EQ(calls[(40 + i)], 3);
    }

    END_TEST;
}

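// Running with result_path, summary_path and print_statistics set should write
// results to both paths and print the statistics to the provided stream.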
bool RunTestCasesWritesResultsAndStatistics() {
    BEGIN_TEST;
    PerformanceTestOptions p_options = PerformanceTestOptions::PerformanceTest();
    p_options.result_path = "/data/results.json";
    p_options.summary_path = "/data/summary.txt";
    p_options.print_statistics = true;

    FixtureOptions f_options = FixtureOptions::Default(DISK_FORMAT_MINFS);
    p_options.sample_count = 1;

    auto test_1 = [](perftest::RepeatState* state, Fixture* fixture) {
        state->DeclareStep("test_1");
        state->DeclareStep("test_2");
        while (state->KeepRunning()) {
            state->NextStep();
        }
        return true;
    };

    TestCaseInfo info;
    info.name = "MyTestCase";
    info.tests.push_back({fbl::move(test_1), "test_1", /*required_disk_space=*/0});
    info.teardown = false;

    fbl::Vector<TestCaseInfo> test_cases;
    test_cases.push_back(fbl::move(info));

    FILE* fp = fopen(kFakeStdout, "w+");
    ASSERT_TRUE(fp);
    ASSERT_TRUE(RunTestCases(f_options, p_options, test_cases, fp));
    fseek(fp, 0, SEEK_SET);
    // Look for test_1.test_1 in fake_stdout.txt (test_name.step_name).
    char* buffer = nullptr;
    size_t length = 0;
    ssize_t read;
    bool found_1 = false;
    bool found_2 = false;
    while ((read = getline(&buffer, &length, fp)) != -1 && (!found_1 || !found_2)) {
        if (strstr(buffer, "test_1.test_1")) {
            found_1 = true;
        } else if (strstr(buffer, "test_1.test_2")) {
            found_2 = true;
        }
        free(buffer);
        buffer = nullptr;
        length = 0;
    }
    free(buffer);
    buffer = nullptr;
    remove(kFakeStdout);
    fclose(fp);
    EXPECT_TRUE(found_1);
    EXPECT_TRUE(found_2);

    struct stat st;
    stat(p_options.result_path.c_str(), &st);
    remove(p_options.result_path.c_str());
    EXPECT_GT(st.st_size, 0);

    stat(p_options.summary_path.c_str(), &st);
    remove(p_options.summary_path.c_str());
    EXPECT_GT(st.st_size, 0);

    END_TEST;
}

BEGIN_TEST_CASE(FsPerformanceTestOptions)
RUN_TEST(ResultSetIsValid)
RUN_TEST(SummaryPathSetIsValid)
RUN_TEST(PrintStatisticsSetIsValid)
RUN_TEST(NoOutputIsInvalid)
END_TEST_CASE(FsPerformanceTestOptions)

BEGIN_TEST_CASE(FsPerformanceTestLib)
RUN_TEST(InvalidOptionsReturnFalseAndPrintsUsage)
RUN_TEST(OptionsAreOverwritten)
RUN_TEST(HelpPrintsUsageMessage)
RUN_TEST(RunTestCasesPreservesOrder)
RUN_TEST(RunTestCasesPreservesOrderWithMultipleSamples)
RUN_TEST(RunTestCasesWritesResultsAndStatistics)
END_TEST_CASE(FsPerformanceTestLib)

} // namespace
} // namespace fs_test_utils