#include <exception>
#include <filesystem>
#include <fstream>
#include <iomanip>
#include <stdexcept>
#include <string>
#include <string_view>
#include <system_error>
#include <vector>

#include <fmt/format.h>
#include <nlohmann/json.hpp>

// Project headers (the daq::Workspace interface/implementation declaration and
// the JSON serialization support for Status and DaqContext) are not shown in
// this excerpt.

namespace fs = std::filesystem;
constexpr const std::string_view PATH_IN_PROGRESS = "in-progress";
constexpr const std::string_view PATH_ARCHIVE = "archive";
constexpr const std::string_view FILE_DAQ_LIST = "list.json";
constexpr const std::string_view SUFFIX_DAQ_CONTEXT = "-context.json";
constexpr const std::string_view SUFFIX_DAQ_STATUS = "-status.json";
constexpr const std::string_view FILE_TEMP_EXT = ".swap";
/**
 * Writes content to a temporary ".swap" file and only then replaces the
 * destination, so a failed write never leaves a truncated destination file.
 */
template <class T>
void SafeWrite(T const& content, fs::path const& destination) {
    // Build the temporary file name "<destination filename>.swap" in the same directory.
    auto temp = destination;
    auto temp_name = destination.filename().native();
    temp_name += FILE_TEMP_EXT;
    temp.replace_filename(temp_name);
    try {
        std::fstream temp_file(temp, std::ios::out | std::ios::trunc);
        temp_file << std::setw(4) << content;
        if (!temp_file) {
            throw std::runtime_error(
                fmt::format("File contains error after write: '{}'", temp.native()));
        }
        // Flush and close before replacing the destination.
        temp_file.close();
        fs::rename(temp, destination);
    } catch (...) {
        std::throw_with_nested(std::runtime_error(
            fmt::format("Failed to write file '{}'", destination.native())));
    }
}
void InitRootWorkspace(fs::path const& root) {
    auto status = fs::status(root);
    if (fs::exists(status)) {
        if (!fs::is_directory(status)) {
            throw std::invalid_argument(fmt::format(
                "Workspace root exists but is not a directory: {}", root.native()));
        }
        // An existing directory is reused only if it contains nothing but
        // recognized workspace entries (and leftover temporary files).
        for (auto const& entry : fs::directory_iterator(root)) {
            if (entry.path().extension() == FILE_TEMP_EXT) {
                continue;
            }
            if (entry.path().filename() == FILE_DAQ_LIST) {
                continue;
            }
            if (entry.path().filename() == PATH_IN_PROGRESS) {
                continue;
            }
            if (entry.path().filename() == PATH_ARCHIVE) {
                continue;
            }
            throw std::invalid_argument(fmt::format(
                "Workspace {} has unexpected contents - aborting to prevent filesystem "
                "modifications due to misconfiguration",
                root.native()));
        }
    }
    fs::create_directories(root);
    fs::create_directory(root / PATH_IN_PROGRESS);
    fs::create_directory(root / PATH_ARCHIVE);
}
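// Resulting workspace layout (illustrative; derived from the constants above):
//
//   <root>/
//     in-progress/    context/status files of active DAQs
//     archive/        files moved here by ArchiveDaq()
//     list.json       DAQ id list, created on demand by StoreList()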
// Opens or creates the workspace in the specified location, using that as a root.
WorkspaceImpl::WorkspaceImpl(std::filesystem::path root) : m_root(std::move(root)) {
    auto status = fs::status(m_root);
    if (status.type() != fs::file_type::directory && status.type() != fs::file_type::not_found) {
        throw std::invalid_argument(fmt::format(
            "Workspace root directory '{}' is not a directory", m_root.string()));
    }
    InitRootWorkspace(m_root);
    if (!fs::exists(m_root / FILE_DAQ_LIST)) {
        WorkspaceImpl::StoreList({});
    }
}
void WorkspaceImpl::RemoveDaq(std::string const& id) {
    fs::remove(MakePath(FileType::Context, id));
    fs::remove(MakePath(FileType::Status, id));
}
void WorkspaceImpl::ArchiveDaq(std::string const& id) {
    auto new_context = m_root / PATH_ARCHIVE / (id + std::string(SUFFIX_DAQ_CONTEXT));
    auto new_status = m_root / PATH_ARCHIVE / (id + std::string(SUFFIX_DAQ_STATUS));
    // If an archived file with the same name already exists, append a numeric
    // suffix until an unused name is found.
    if (fs::exists(new_context)) {
        for (auto idx = 1u; idx < 0xffff; ++idx) {
            auto tmp = new_context;
            tmp += std::to_string(idx);
            if (!fs::exists(tmp)) {
                new_context.swap(tmp);
                break;
            }
        }
    }
    if (fs::exists(new_status)) {
        for (auto idx = 1u; idx < 0xffff; ++idx) {
            auto tmp = new_status;
            tmp += std::to_string(idx);
            if (!fs::exists(tmp)) {
                new_status.swap(tmp);
                break;
            }
        }
    }
    fs::rename(MakePath(FileType::Context, id), new_context);
    fs::rename(MakePath(FileType::Status, id), new_status);
}
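// Name-collision example (illustrative; "ab12" is a made-up id): if
// "archive/ab12-context.json" already exists, the loop above probes
// "ab12-context.json1", "ab12-context.json2", ... and keeps the first name
// that does not exist yet, since operator+= appends to the full file name.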
auto WorkspaceImpl::LoadList() const -> std::vector<std::string> {
    auto file = m_root / FILE_DAQ_LIST;
    try {
        std::fstream fs(file, std::ios::in);
        auto json = nlohmann::json::parse(fs);
        return json.get<std::vector<std::string>>();
    } catch (...) {
        std::throw_with_nested(std::runtime_error(
            fmt::format("Failed to load DAQ list '{}'", file.native())));
    }
}
void WorkspaceImpl::StoreList(std::vector<std::string> const& list) const {
    try {
        nlohmann::json json = list;
        SafeWrite(json, m_root / FILE_DAQ_LIST);
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to store DAQ list"));
    }
}
auto WorkspaceImpl::LoadStatus(std::string const& id) const -> Status {
    try {
        std::fstream fs(MakePath(FileType::Status, id));
        auto json = nlohmann::json::parse(fs);
        return json.get<Status>();
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to load DAQ status"));
    }
}
void WorkspaceImpl::StoreStatus(Status const& status) const {
    try {
        nlohmann::json json = status;
        SafeWrite(json, MakePath(FileType::Status, status.id));
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to store DAQ status"));
    }
}
auto WorkspaceImpl::LoadContext(std::string const& id) const -> DaqContext {
    auto file = MakePath(FileType::Context, id);
    try {
        std::fstream fs(file, std::ios::in);
        auto json = nlohmann::json::parse(fs);
        DaqContext context;
        json.get_to(context);
        return context;
    } catch (...) {
        std::throw_with_nested(std::runtime_error(
            fmt::format("Failed to load DAQ Context '{}'", file.native())));
    }
}
void WorkspaceImpl::StoreContext(DaqContext const& context) const {
    try {
        nlohmann::json json = context;
        SafeWrite(json, MakePath(FileType::Context, context.id));
    } catch (...) {
        std::throw_with_nested(std::runtime_error("Failed to store DAQ context"));
    }
}
std::filesystem::path WorkspaceImpl::MakePath(FileType type, std::string const& id) const {
    switch (type) {
    case FileType::Context:
        return m_root / PATH_IN_PROGRESS / (id + std::string(SUFFIX_DAQ_CONTEXT));
    case FileType::Status:
        return m_root / PATH_IN_PROGRESS / (id + std::string(SUFFIX_DAQ_STATUS));
    }
    // Defensive fallthrough for unexpected enumerator values.
    throw std::invalid_argument("Unknown FileType");
}
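// Path examples (illustrative; the id is made up):
//
//   MakePath(FileType::Context, "ab12") -> <m_root>/in-progress/ab12-context.json
//   MakePath(FileType::Status,  "ab12") -> <m_root>/in-progress/ab12-status.json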