// aca-tasks/task1/main.cpp

#include <fmt/format.h>
#include <vector>
#include <fstream>
#include <string>
#include <chrono>
#include <thread>
#include <cstdint>
#include <mergesort.h>

/*
Create a simple sorting application that uses the mergesort algorithm to sort a
large collection (e.g., 10^7) of 32-bit integers. The input data and output results
should be stored in files, and the I/O operations should be considered a
sequential part of the application. Mergesort is an algorithm that is considered
appropriate for parallel execution, although it cannot be divided equally among
an arbitrary number of processors, as Amdahl's and Gustafson-Barsis's laws
require.
Assuming that this equal division is possible, estimate α, i.e., the part of the
program that can be parallelized, by using a profiler like gprof or valgrind to
measure the duration of the sort's execution relative to the overall execution
time. Use this number to estimate the predicted speedup for your program.
Does α depend on the size of the input? If it does, how should you modify
your predictions and their graphical illustration?
*/
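
/*
A minimal sketch of the predicted-speedup estimate asked for above, assuming
Amdahl's law S(n) = 1 / ((1 - α) + α/n) as the model. The helper name
amdahl_speedup is illustrative and not part of the original sources; α would be
the sort-time fraction measured with gprof or valgrind.
*/
constexpr auto amdahl_speedup(double alpha, int n) -> double {
    // The serial fraction (1 - alpha) is irreducible; the parallel fraction shrinks by 1/n.
    return 1.0 / ((1.0 - alpha) + alpha / n);
}
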
// Reads one integer per line from `stream` and appends it to `vec`.
template<typename T>
auto parse_file(std::ifstream &stream, std::vector<T> &vec) -> void {
    std::string buf;
    while (std::getline(stream, buf)) {
        vec.emplace_back(static_cast<T>(std::stoul(buf)));
    }
}

auto main(int argc, char *argv[]) -> int {
    try {
        const std::string input_path = "dataset.dat";
        std::ifstream file(input_path, std::ios_base::in);
        if (!file.is_open()) {
            fmt::print("Error opening file {}\n", input_path);
            return -1;
        }
        fmt::print("Opened file {} successfully\n", input_path);

        std::vector<int32_t> dataset;
        parse_file(file, dataset);
        fmt::print("Read {} values from {}\n", dataset.size(), input_path);

        // Two copies of the data, so the sequential and parallel sorts run on identical input.
        auto dataset_par = dataset;
        auto dataset_seq = dataset;

        // Sequential mergesort: the share of this region in the total runtime is the α
        // the task asks to estimate.
        auto t1 = std::chrono::high_resolution_clock::now();
        algo::MergeSort_v1::sort(dataset_seq.begin(), dataset_seq.end(), [](int32_t a, int32_t b) {
            return (a > b);
        });
        auto t2 = std::chrono::high_resolution_clock::now();
        const auto seq_ms = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1);
        fmt::print("Sorted {} entries in {} ms sequentially\n", dataset_seq.size(), seq_ms.count());

        // Parallel mergesort across all available hardware threads.
        const int nthreads = static_cast<int>(std::thread::hardware_concurrency());
        t1 = std::chrono::high_resolution_clock::now();
        algo::MergeSort_mt::sort(dataset_par, [](int32_t a, int32_t b) {
            return (a > b);
        }, nthreads);
        t2 = std::chrono::high_resolution_clock::now();
        const auto par_ms = std::chrono::duration_cast<std::chrono::milliseconds>(t2 - t1);
        fmt::print("Sorted {} entries in {} ms in parallel using {} threads\n", dataset_par.size(), par_ms.count(), nthreads);

        // Sanity check: both sort variants must produce the same result.
        auto eq = (dataset_seq == dataset_par);
        fmt::print("Equality: {}\n", eq);
        fmt::print("Parallel dataset: {}; Sequential dataset: {}\n", dataset_par.size(), dataset_seq.size());
        //fmt::print("Created {} recursions\n", algo::MergeSort_v1::get_recursions());

        std::ofstream ofile("dataset.out.dat", std::ios_base::out);
        if (!ofile.is_open()) {
            fmt::print("Error opening output file\n");
            return -1;
        }
        for (const auto &element : dataset_seq) {
            ofile << element << '\n';
        }
        file.close();
        ofile.flush();
        ofile.close();
        fmt::print("Written to output file\n");

        return 0;
    } catch (const std::exception &e) {
        fmt::print("Error occurred: {}\n", e.what());
        return -1;
    }
}