// Integration tests for the Slurm task executor (daggy).
#include <sys/types.h>
#include <unistd.h>

#include <catch2/catch.hpp>

#include <filesystem>
#include <iostream>

#include "daggy/Serialization.hpp"
#include "daggy/Utilities.hpp"
#include "daggy/executors/task/SlurmTaskExecutor.hpp"

namespace fs = std::filesystem;

#ifdef DAGGY_ENABLE_SLURM

// Integration tests: these submit real jobs through the Slurm executor, so
// they are only compiled when Slurm support is enabled at build time.
TEST_CASE("slurm_execution", "[slurm_executor]")
{
    daggy::executors::task::SlurmTaskExecutor ex;

    // Minimal resource request merged into every test job below. userID and
    // workDir/tmpDir come from the calling process so the job runs as this
    // user in the current directory.
    daggy::ConfigValues defaultJobValues{{"minCPUs", "1"},
                                         {"minMemoryMB", "100"},
                                         {"minTmpDiskMB", "0"},
                                         {"priority", "1"},
                                         {"timeLimitSeconds", "200"},
                                         {"userID", std::to_string(getuid())},
                                         {"workDir", fs::current_path().string()},
                                         {"tmpDir", fs::current_path().string()}};

    SECTION("Simple Run")
    {
        // Use the executor's Command alias here for consistency with the
        // other sections (same vector-of-argv-strings shape).
        daggy::Task task{
            .job{{"command", daggy::executors::task::SlurmTaskExecutor::Command{
                                 "/usr/bin/echo", "abc", "123"}}}};

        task.job.merge(defaultJobValues);

        REQUIRE(ex.validateTaskParameters(task.job));

        auto recFuture = ex.execute("command", task);
        auto rec = recFuture.get();

        // echo prints "abc 123\n"; keep the original loose lower bound.
        REQUIRE(rec.rc == 0);
        REQUIRE(rec.outputLog.size() >= 6);
        REQUIRE(rec.errorLog.empty());
    }

    SECTION("Error Run")
    {
        // expr with a trailing operator is a guaranteed usage error: nonzero
        // exit code and a diagnostic on stderr, nothing on stdout.
        daggy::Task task{
            .job{{"command", daggy::executors::task::SlurmTaskExecutor::Command{
                                 "/usr/bin/expr", "1", "+", "+"}}}};
        task.job.merge(defaultJobValues);

        auto recFuture = ex.execute("command", task);
        auto rec = recFuture.get();

        REQUIRE(rec.rc != 0);
        REQUIRE(rec.errorLog.size() >= 20);
        REQUIRE(rec.outputLog.empty());
    }

    SECTION("Large Output")
    {
        // Candidate large files that commonly exist on Linux hosts; the test
        // cats the first one present and verifies the full output was
        // captured byte-for-byte (no truncation in the executor's log
        // plumbing).
        const std::vector<std::string> BIG_FILES{"/usr/share/dict/linux.words",
                                                 "/usr/share/dict/cracklib-small",
                                                 "/etc/ssh/moduli"};

        for (const auto &bigFile : BIG_FILES) {
            if (!std::filesystem::exists(bigFile))
                continue;

            daggy::Task task{
                .job{{"command",
                      daggy::executors::task::SlurmTaskExecutor::Command{
                          "/usr/bin/cat", bigFile}}}};
            task.job.merge(defaultJobValues);

            auto recFuture = ex.execute("command", task);
            auto rec = recFuture.get();

            REQUIRE(rec.rc == 0);
            REQUIRE(rec.outputLog.size() == std::filesystem::file_size(bigFile));
            REQUIRE(rec.errorLog.empty());
            break;  // one existing file is enough
        }
    }

    SECTION("Parameter Expansion")
    {
        // A two-valued DATE parameter should expand task B into two concrete
        // tasks.
        std::string testParams{R"({"DATE": ["2021-05-06", "2021-05-07" ]})"};
        auto params = daggy::configFromJSON(testParams);

        std::string taskJSON =
            R"({"B": {"job": {"command": ["/usr/bin/echo", "{{DATE}}"]}, "children": ["C"]}})";
        auto tasks = daggy::tasksFromJSON(taskJSON, defaultJobValues);

        auto result = daggy::expandTaskSet(tasks, ex, params);
        REQUIRE(result.size() == 2);
    }

    SECTION("Build with expansion")
    {
        // A -> B -> C chain where only B references parameters: B expands to
        // two tasks (one per DATE), A and C stay single => 4 tasks total.
        std::string testParams{
            R"({"DATE": ["2021-05-06", "2021-05-07" ], "SOURCE": "name"})"};
        auto params = daggy::configFromJSON(testParams);
        std::string testTasks =
            R"({"A": {"job": {"command": ["/bin/echo", "A"]}, "children": ["B"]}, "B": {"job": {"command": ["/bin/echo", "B", "{{SOURCE}}", "{{DATE}}"]}, "children": ["C"]}, "C": {"job": {"command": ["/bin/echo", "C"]}}})";
        auto tasks = daggy::expandTaskSet(
            daggy::tasksFromJSON(testTasks, defaultJobValues), ex, params);
        REQUIRE(tasks.size() == 4);
    }

    SECTION("Build with expansion using parents instead of children")
    {
        // Same DAG as above but declared via "parents" edges; expansion must
        // give the identical count regardless of edge direction.
        std::string testParams{
            R"({"DATE": ["2021-05-06", "2021-05-07" ], "SOURCE": "name"})"};
        auto params = daggy::configFromJSON(testParams);
        std::string testTasks =
            R"({"A": {"job": {"command": ["/bin/echo", "A"]}}, "B": {"job": {"command": ["/bin/echo", "B", "{{SOURCE}}", "{{DATE}}"]}, "parents": ["A"]}, "C": {"job": {"command": ["/bin/echo", "C"]}, "parents": ["A"]}})";
        auto tasks = daggy::expandTaskSet(
            daggy::tasksFromJSON(testTasks, defaultJobValues), ex, params);

        REQUIRE(tasks.size() == 4);
    }
}

#endif