From 8e18fadf54b6e3d964d904bdedfface1da53a631 Mon Sep 17 00:00:00 2001 From: FancMa01 Date: Wed, 11 Sep 2024 08:31:28 -0600 Subject: [PATCH] Fix cluster dashboard (#870) Rework error handling function for authheader to always provide a message and not rely on response.json(); Add cluster storage and usage routes to new MVC structure. Remove file export options, as they are not functional enough for production environments --- .../dashboard/ExportMenu/ExportMenu.jsx | 90 +---- .../charts/CurrentClusterUsageCharts.jsx | 5 +- .../dashboard/clusterUsage/index.js | 5 +- .../src/components/common/AuthHeader.js | 15 +- .../server/controllers/clusterController.js | 337 ++++++++++++------ .../server/middlewares/clusterMiddleware.js | 22 +- Tombolo/server/routes/cluster/read.js | 233 ------------ Tombolo/server/routes/clusterRoutes.js | 14 +- 8 files changed, 292 insertions(+), 429 deletions(-) delete mode 100644 Tombolo/server/routes/cluster/read.js diff --git a/Tombolo/client-reactjs/src/components/application/dashboard/ExportMenu/ExportMenu.jsx b/Tombolo/client-reactjs/src/components/application/dashboard/ExportMenu/ExportMenu.jsx index 2db3787f3..3986f444f 100644 --- a/Tombolo/client-reactjs/src/components/application/dashboard/ExportMenu/ExportMenu.jsx +++ b/Tombolo/client-reactjs/src/components/application/dashboard/ExportMenu/ExportMenu.jsx @@ -1,13 +1,13 @@ import React, { useState, useEffect } from 'react'; import { useSelector } from 'react-redux'; import Text from '../../../common/Text'; -import { Button, message, Dropdown } from 'antd'; +import { Button, Dropdown } from 'antd'; import { DownOutlined } from '@ant-design/icons'; -import { authHeader, handleError } from '../../../common/AuthHeader.js'; + import { useLocation } from 'react-router-dom'; import DashboardModal from './DashboardModal'; -const ExportMenu = (selectedCluster) => { +const ExportMenu = () => { const { application: { applicationId }, } = useSelector((state) => state.applicationReducer); @@ -23,89 +23,21 @@ const ExportMenu = (selectedCluster) => { setDataType(splitName[splitName.length - 1]); }); - // const menu = ( - // handleMenuClick(e)}> - // - // {} - // - // - // {} - // - // - // {} - // - // - // ); - const menuItems = [ - { key: 'CSV', icon: , label: 'CSV' }, - { key: 'JSON', icon: , label: 'JSON' }, - { key: 'API', icon: , label: 'API' }, + { + key: 'API', + icon: , + label: 'API', + onClick: (e) => { + handleMenuClick(e); + }, + }, ]; const handleMenuClick = async (e) => { if (e.key === 'API') { setModalVisible(true); } - - if (e.key === 'CSV' || e.key === 'JSON') { - await getFile(e.key); - } - }; - - const getFile = async (type) => { - try { - //TO DO --- write checks using dataType State to reach out to correct API's - //example if (dataType === 'notifications') { - const payload = { - method: 'GET', - header: authHeader(), - }; - - let response; - if (dataType === 'notifications') { - response = await fetch(`/api/notifications/read/${applicationId}/file/${type}`, payload); - } - - if (dataType === 'clusterUsage') { - response = await fetch( - `/api/cluster/clusterStorageHistory/file/${type}/${selectedCluster.selectedCluster}`, - payload - ); - } - - if (!response.ok) handleError(response); - const blob = await response.blob(); - const newBlob = new Blob([blob]); - - const blobUrl = window.URL.createObjectURL(newBlob); - - const link = document.createElement('a'); - link.href = blobUrl; - link.setAttribute('download', `Tombolo-${dataType}.${type}`); - document.body.appendChild(link); - link.click(); - 
link.parentNode.removeChild(link); - - //send delete request after file is downloaded - const payload2 = { - method: 'DELETE', - header: authHeader(), - }; - if (dataType === 'notifications') { - const response2 = await fetch(`/api/notifications/read/${applicationId}/file/${type}`, payload2); - - if (!response2.ok) handleError(response2); - } - - if (dataType === 'clusterUsage') { - const response2 = await fetch(`/api/cluster/clusterStorageHistory/file/${type}`, payload2); - - if (!response2.ok) handleError(response2); - } - } catch (error) { - message.error('Failed to fetch notifications'); - } }; return ( diff --git a/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/charts/CurrentClusterUsageCharts.jsx b/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/charts/CurrentClusterUsageCharts.jsx index fef8b66d3..933814f83 100644 --- a/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/charts/CurrentClusterUsageCharts.jsx +++ b/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/charts/CurrentClusterUsageCharts.jsx @@ -27,7 +27,10 @@ function CurrentClusterUsageCharts({ selectedCluster, setSelectedCluster }) { }; const response = await fetch(`/api/cluster/currentClusterUsage/${clusterId}`, payload); - if (!response.ok) handleError(response); + if (!response.ok) { + handleError(response); + return; + } const data = await response.json(); const groupedUsage = []; diff --git a/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/index.js b/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/index.js index 31f2e0db3..d3d1259de 100644 --- a/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/index.js +++ b/Tombolo/client-reactjs/src/components/application/dashboard/clusterUsage/index.js @@ -90,7 +90,10 @@ function ClusterUsage() { const queryData = JSON.stringify({ clusterId, historyDateRange }); const response = await fetch(`/api/cluster/clusterStorageHistory/${queryData}`, payload); - if (!response.ok) handleError(response); + if (!response.ok) { + handleError(response); + return; + } const data = await response.json(); setClusterUsageHistory(data); } catch (err) { diff --git a/Tombolo/client-reactjs/src/components/common/AuthHeader.js b/Tombolo/client-reactjs/src/components/common/AuthHeader.js index 9a1a86cfa..5511cad39 100644 --- a/Tombolo/client-reactjs/src/components/common/AuthHeader.js +++ b/Tombolo/client-reactjs/src/components/common/AuthHeader.js @@ -19,13 +19,16 @@ export function handleError(response) { store.dispatch(userActions.logout()); } else if (response.status == 422) { throw Error('Error occurred while saving the data. Please check the form data'); + } else if (response.status == 404) { + message.error('404: Resource not found on server'); + return; + } else if (typeof response === 'string') { + message.error(response); + return; } else { - let errorMessage = ''; - response.json().then((responseData) => { - errorMessage = responseData.message; - //throw new Error(errorMessage); - message.error(errorMessage); - }); + //if we have not defined a handling above, throw undefined error + message.error('An undefined error occurred. 
Please try again later'); + return; } } diff --git a/Tombolo/server/controllers/clusterController.js b/Tombolo/server/controllers/clusterController.js index 3ef83c959..6ed64bd1a 100644 --- a/Tombolo/server/controllers/clusterController.js +++ b/Tombolo/server/controllers/clusterController.js @@ -1,4 +1,4 @@ -const {clusters}= require("../cluster-whitelist.js"); +const { clusters } = require("../cluster-whitelist.js"); const { AccountService, TopologyService, @@ -8,8 +8,10 @@ const logger = require("../config/logger"); const models = require("../models"); const { encryptString } = require("../utils/cipher.js"); const CustomError = require("../utils/customError.js"); - +const hpccUtil = require("../utils/hpcc-util.js"); +const hpccJSComms = require("@hpcc-js/comms"); const Cluster = models.cluster; +const moment = require("moment"); // Add a cluster - Without sending progress updates to client const addCluster = async (req, res) => { @@ -26,9 +28,9 @@ const addCluster = async (req, res) => { // Make sure cluster is whitelisted const cluster = clusters.find((c) => c.name === clusterName); - if (!cluster){ + if (!cluster) { return; - } + } const baseUrl = `${cluster.thor}:${cluster.thor_port}`; @@ -60,7 +62,9 @@ const addCluster = async (req, res) => { if (!defaultEngine) throw new CustomError("Default engine not found", 400); // Execute ECL code to get timezone offset - logger.verbose("Adding new cluster: Executing ECL code to get timezone offset"); + logger.verbose( + "Adding new cluster: Executing ECL code to get timezone offset" + ); const eclCode = "IMPORT Std; now := Std.Date.LocalTimeZoneOffset(); OUTPUT(now);"; @@ -112,13 +116,15 @@ const addCluster = async (req, res) => { if (password) { clusterPayload.hash = encryptString(password); } - + // Create cluster const newCluster = await Cluster.create(clusterPayload); res.status(201).json({ success: true, data: newCluster }); } catch (err) { logger.error(`Add cluster: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); } }; @@ -149,19 +155,31 @@ const addClusterWithProgress = async (req, res) => { // Make sure cluster is whitelisted const cluster = clusters.find((c) => c.name === clusterName); - if (!cluster){ + if (!cluster) { return; - } + } const baseUrl = `${cluster.thor}:${cluster.thor_port}`; // Check if cluster is reachable - sendUpdate({step: 1, success: true, message: "Authenticating cluster .."}); + sendUpdate({ + step: 1, + success: true, + message: "Authenticating cluster ..", + }); await new AccountService({ baseUrl, userID, password }).MyAccount(); - sendUpdate({step: 1, success: true, message: "Cluster authentication complete"}); + sendUpdate({ + step: 1, + success: true, + message: "Cluster authentication complete", + }); // Get default cluster (engine) if exists - if not pick the first one - sendUpdate({step: 2, success: true, message: "Selecting default engine .."}); + sendUpdate({ + step: 2, + success: true, + message: "Selecting default engine ..", + }); const { TpLogicalClusters: { TpLogicalCluster }, } = await new TopologyService({ @@ -185,11 +203,21 @@ const addClusterWithProgress = async (req, res) => { // if default cluster is not found, return error if (!defaultEngine) throw new CustomError("Default engine not found", 400); - sendUpdate({step: 2, success: true, message: "Default engine selection complete"}); + sendUpdate({ + step: 2, + success: true, + message: "Default engine 
selection complete", + }); // Execute ECL code to get timezone offset - sendUpdate({step: 3, success: true, message: "Getting timezone offset .."}); - logger.verbose("Adding new cluster: Executing ECL code to get timezone offset"); + sendUpdate({ + step: 3, + success: true, + message: "Getting timezone offset ..", + }); + logger.verbose( + "Adding new cluster: Executing ECL code to get timezone offset" + ); const eclCode = "IMPORT Std; now := Std.Date.LocalTimeZoneOffset(); OUTPUT(now);"; @@ -222,8 +250,16 @@ const addClusterWithProgress = async (req, res) => { const offSetInMinutes = parseInt(wuSummary.Result.Value) / 60; // throw new Error("Error occurred while getting timezone offset"); - sendUpdate({step: 3 , success: true, message: "Getting timezone offset complete"}); - sendUpdate({step: 4, success: true,message: "Preparing to save cluster ..."}); + sendUpdate({ + step: 3, + success: true, + message: "Getting timezone offset complete", + }); + sendUpdate({ + step: 4, + success: true, + message: "Preparing to save cluster ...", + }); // Payload const clusterPayload = { @@ -247,120 +283,215 @@ const addClusterWithProgress = async (req, res) => { } // Create cluster const newCluster = await Cluster.create(clusterPayload); - sendUpdate({step: 4, success: true,message: "Cluster added successfully", cluster: newCluster}); + sendUpdate({ + step: 4, + success: true, + message: "Cluster added successfully", + cluster: newCluster, + }); res.end(); // res.status(201).json({ success: true, data: newCluster }); } catch (err) { logger.error(`Add cluster: ${err.message}`); // res.status(err.status || 500).json({ success: false, message: err.message }); - sendUpdate({step: 99, success: false, message: err.message}); + sendUpdate({ step: 99, success: false, message: err.message }); res.end(); } }; // Retrieve all clusters const getClusters = async (req, res) => { - try { - // Get clusters ASC by name - const clusters = await Cluster.findAll({ - attributes: { exclude: ["hash"] }, - order: [["name", "ASC"]], - }); - res.status(200).json({ success: true, data: clusters }); - } catch (err) { - logger.error(`Get clusters: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); - } + try { + // Get clusters ASC by name + const clusters = await Cluster.findAll({ + attributes: { exclude: ["hash"] }, + order: [["name", "ASC"]], + }); + res.status(200).json({ success: true, data: clusters }); + } catch (err) { + logger.error(`Get clusters: ${err.message}`); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); + } }; // Retrieve a cluster const getCluster = async (req, res) => { - try { - // Get one cluster by id - const cluster = await Cluster.findOne({ - where: { id: req.params.id }, - attributes: { exclude: ["hash"] }, - }); - if (!cluster) throw new CustomError("Cluster not found", 404); - res.status(200).json({ success: true, data: cluster }); - } catch (err) { - logger.error(`Get cluster: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); - } -} + try { + // Get one cluster by id + const cluster = await Cluster.findOne({ + where: { id: req.params.id }, + attributes: { exclude: ["hash"] }, + }); + if (!cluster) throw new CustomError("Cluster not found", 404); + res.status(200).json({ success: true, data: cluster }); + } catch (err) { + logger.error(`Get cluster: ${err.message}`); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); + } +}; // Delete a 
cluster const deleteCluster = async (req, res) => { - try { - // Delete a cluster by id - const cluster = await Cluster.destroy({ where: { id: req.params.id } }); - if (!cluster) throw new CustomError("Cluster not found", 404); - res.status(200).json({ success: true, data: cluster }); - } catch (err) { - logger.error(`Delete cluster: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); - } -} + try { + // Delete a cluster by id + const cluster = await Cluster.destroy({ where: { id: req.params.id } }); + if (!cluster) throw new CustomError("Cluster not found", 404); + res.status(200).json({ success: true, data: cluster }); + } catch (err) { + logger.error(`Delete cluster: ${err.message}`); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); + } +}; // Update a cluster const updateCluster = async (req, res) => { - // Only username, password, adminEmails can be updated. only update that if it is present in the request body - try { - const { username, password, adminEmails, updatedBy } = req.body; - const cluster = await Cluster.findOne({ where: { id: req.params.id } }); - if (!cluster) throw new CustomError("Cluster not found", 404); - if (username) cluster.username = username; - if (password) cluster.hash = encryptString(password); - if (adminEmails) cluster.adminEmails = adminEmails; - cluster.updatedBy = updatedBy; - - await cluster.save(); - res.status(200).json({ success: true, data: cluster }); - } catch (err) { - logger.error(`Update cluster: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); - } - + // Only username, password, adminEmails can be updated. only update that if it is present in the request body + try { + const { username, password, adminEmails, updatedBy } = req.body; + const cluster = await Cluster.findOne({ where: { id: req.params.id } }); + if (!cluster) throw new CustomError("Cluster not found", 404); + if (username) cluster.username = username; + if (password) cluster.hash = encryptString(password); + if (adminEmails) cluster.adminEmails = adminEmails; + cluster.updatedBy = updatedBy; + + await cluster.save(); + res.status(200).json({ success: true, data: cluster }); + } catch (err) { + logger.error(`Update cluster: ${err.message}`); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); } +}; // Retrieve all whitelisted clusters const getClusterWhiteList = async (req, res) => { - try { - if (!clusters) throw new CustomError("Cluster whitelist not found", 404); - res.status(200).json({ success: true, data: clusters }); - } catch (err) { - logger.error(`Get cluster white list: ${err.message}`); - res.status(err.status || 500).json({ success: false, message: err.message }); - } -} - + try { + if (!clusters) throw new CustomError("Cluster whitelist not found", 404); + res.status(200).json({ success: true, data: clusters }); + } catch (err) { + logger.error(`Get cluster white list: ${err.message}`); + res + .status(err.status || 500) + .json({ success: false, message: err.message }); + } +}; // Ping HPCC cluster to find if it is reachable const pingCluster = async (req, res) => { - try { - const { name, username, password } = req.body; - const cluster = clusters.find((c) => c.name === name); - - // If bogus cluster name is provided, return error - if (!cluster) throw new CustomError("Cluster not whitelisted", 400); - - // construct base url - const baseUrl = `${cluster.thor}:${cluster.thor_port}`; - - // Ping cluster 
- await new AccountService({ baseUrl, userID: username, password }).MyAccount(); - res.status(200).json({ success: true, message: "Authorized" }); - } catch (err) { - let errMessage = "Unable to reach cluster"; - let statusCode = err.statusCode || 500; - - if (err.message.includes("Unauthorized")) { - errMessage = "Unauthorized"; - statusCode = 401; + try { + const { name, username, password } = req.body; + const cluster = clusters.find((c) => c.name === name); + + // If bogus cluster name is provided, return error + if (!cluster) throw new CustomError("Cluster not whitelisted", 400); + + // construct base url + const baseUrl = `${cluster.thor}:${cluster.thor_port}`; + + // Ping cluster + await new AccountService({ + baseUrl, + userID: username, + password, + }).MyAccount(); + res.status(200).json({ success: true, message: "Authorized" }); + } catch (err) { + let errMessage = "Unable to reach cluster"; + let statusCode = err.statusCode || 500; + + if (err.message.includes("Unauthorized")) { + errMessage = "Unauthorized"; + statusCode = 401; + } + res.status(statusCode).json({ success: false, message: errMessage }); + } +}; + +const clusterUsage = async (req, res) => { + try { + const { id } = req.params; + + //Get cluster details + let cluster = await hpccUtil.getCluster(id); // Checks if cluster is reachable and decrypts cluster credentials if any + const { thor_host, thor_port, username, hash } = cluster; + const clusterDetails = { + baseUrl: `${thor_host}:${thor_port}`, + userID: username || "", + password: hash || "", + }; + + //Use JS comms library to fetch current usage + const machineService = new hpccJSComms.MachineService(clusterDetails); + const targetClusterUsage = await machineService.GetTargetClusterUsageEx(); + + const maxUsage = targetClusterUsage.map((target) => ({ + name: target.Name, + maxUsage: target.max.toFixed(2), + meanUsage: target.mean.toFixed(2), + })); + res.status(200).send(maxUsage); + } catch (err) { + console.log(err); + res.status(503).json({ + success: false, + message: "Failed to fetch current cluster usage", + }); + logger.error(err); + } +}; + +const clusterStorageHistory = async (req, res) => { + try { + const { queryData } = req.params; + const query = JSON.parse(queryData); + + const data = await Cluster.findOne({ + where: { id: query.clusterId }, + raw: true, + attributes: ["metaData"], + }); + + // Filter data before sending to client + const start_date = moment(query.historyDateRange[0]).valueOf(); + const end_date = moment(query.historyDateRange[1]).valueOf(); + + const storageUsageHistory = data.metaData?.storageUsageHistory || {}; + + const filtered_data = {}; + + for (const key in storageUsageHistory) { + filtered_data[key] = []; + for (const item of storageUsageHistory[key]) { + if (item.date < start_date) { + break; + } + + if (item.date > end_date) { + break; + } else { + filtered_data[key].unshift(item); + } } - res.status(statusCode).json({ success: false, message: errMessage }); } -} + + res.status(200).send(filtered_data); + } catch (err) { + console.log(err); + logger.error(err); + res.status(503).json({ + success: false, + message: "Failed to fetch current cluster usage", + }); + } +}; module.exports = { addCluster, @@ -371,4 +502,6 @@ module.exports = { updateCluster, getClusterWhiteList, pingCluster, -}; \ No newline at end of file + clusterUsage, + clusterStorageHistory, +}; diff --git a/Tombolo/server/middlewares/clusterMiddleware.js b/Tombolo/server/middlewares/clusterMiddleware.js index 004901ef8..9c1010b42 100644 --- 
a/Tombolo/server/middlewares/clusterMiddleware.js +++ b/Tombolo/server/middlewares/clusterMiddleware.js @@ -88,7 +88,7 @@ const validateUpdateClusterInputs = [ } next(); }, -]; +]; // Validate name for blind ping const validateClusterPingPayload = [ @@ -98,16 +98,16 @@ const validateClusterPingPayload = [ .isLength({ max: 300 }) .withMessage("Name must not exceed 300 characters"), - // username - validate if present - body("username") + // username - validate if present + body("username") .optional({ nullable: true }) .isAlphanumeric() .withMessage("Username must be alphanumeric") .isLength({ max: 200 }) .withMessage("Username must not exceed 200 characters"), - // Password - validate if present - body("password") + // Password - validate if present + body("password") .optional({ nullable: true }) .isString() .withMessage("Password must be a string") @@ -123,9 +123,21 @@ const validateClusterPingPayload = [ }, ]; +const validateQueryData = [ + param("queryData").isString().withMessage("Invalid cluster query data"), + (req, res, next) => { + const errors = validationResult(req); + if (!errors.isEmpty()) { + return res.status(422).json({ success: false, errors: errors.array() }); + } + next(); + }, +]; + module.exports = { validateAddClusterInputs, validateClusterId, validateUpdateClusterInputs, validateClusterPingPayload, + validateQueryData, }; diff --git a/Tombolo/server/routes/cluster/read.js b/Tombolo/server/routes/cluster/read.js deleted file mode 100644 index 308de5196..000000000 --- a/Tombolo/server/routes/cluster/read.js +++ /dev/null @@ -1,233 +0,0 @@ -const express = require("express"); -const moment = require("moment"); -const { param, validationResult } = require("express-validator"); -const hpccJSComms = require("@hpcc-js/comms"); -const path = require("path"); -const fsPromises = require("fs/promises"); -const logger = require("../../config/logger"); -const validatorUtil = require("../../utils/validator"); -const hpccUtil = require("../../utils/hpcc-util"); -const models = require("../../models"); - -const Cluster = models.cluster; -const router = express.Router(); - -router.get( - "/currentClusterUsage/:clusterId", - [param("clusterId").isUUID().withMessage("Invalid cluster ID")], - async (req, res) => { - try { - //Check for errors - return if exists - const errors = validationResult(req).formatWith( - validatorUtil.errorFormatter - ); - - // return if error(s) exist - if (!errors.isEmpty()) - return res.status(422).json({ success: false, errors: errors.array() }); - - const { clusterId } = req.params; - - //Get cluster details - let cluster = await hpccUtil.getCluster(clusterId); // Checks if cluster is reachable and decrypts cluster credentials if any - const { thor_host, thor_port, username, hash } = cluster; - const clusterDetails = { - baseUrl: `${thor_host}:${thor_port}`, - userID: username || "", - password: hash || "", - }; - - //Use JS comms library to fetch current usage - const machineService = new hpccJSComms.MachineService(clusterDetails); - const targetClusterUsage = await machineService.GetTargetClusterUsageEx(); - - const maxUsage = targetClusterUsage.map((target) => ({ - name: target.Name, - maxUsage: target.max.toFixed(2), - meanUsage: target.mean.toFixed(2), - })); - res.status(200).send(maxUsage); - } catch (err) { - res.status(503).json({ - success: false, - message: "Failed to fetch current cluster usage", - }); - logger.error(err); - } - } -); - -router.get( - "/clusterStorageHistory/:queryData", - [param("queryData").isString().withMessage("Invalid 
cluster query data")], - async (req, res) => { - try { - //Check for errors - return if exists - const errors = validationResult(req).formatWith( - validatorUtil.errorFormatter - ); - - // return if error(s) exist - if (!errors.isEmpty()) - return res.status(422).json({ success: false, errors: errors.array() }); - - const { queryData } = req.params; - const query = JSON.parse(queryData); - - const data = await Cluster.findOne({ - where: { id: query.clusterId }, - raw: true, - attributes: ["metaData"], - }); - - // Filter data before sending to client - const start_date = moment(query.historyDateRange[0]).valueOf(); - const end_date = moment(query.historyDateRange[1]).valueOf(); - - const storageUsageHistory = data.metaData?.storageUsageHistory || {}; - - const filtered_data = {}; - - for (const key in storageUsageHistory) { - filtered_data[key] = []; - for (const item of storageUsageHistory[key]) { - if (item.date < start_date) { - break; - } - - if (item.date > end_date) { - break; - } else { - filtered_data[key].unshift(item); - } - } - } - res.status(200).send(filtered_data); - } catch (err) { - logger.error(err); - res.status(503).json({ - success: false, - message: "Failed to fetch current cluster usage", - }); - } - } -); - -router.get( - "/clusterStorageHistory/file/:type/:clusterId", - [param("clusterId").isUUID()], - async (req, res) => { - try { - //Check for errors - return if exists - const errors = validationResult(req).formatWith( - validatorUtil.errorFormatter - ); - - // return if error(s) exist - if (!errors.isEmpty()) - return res.status(422).json({ success: false, errors: errors.array() }); - - const { type, clusterId } = req.params; - - const data = await Cluster.findOne({ - where: { id: clusterId }, - raw: true, - attributes: ["metaData"], - }); - - const storageUsageHistory = data.metaData?.storageUsageHistory || {}; - - let output; - - if (type === "CSV") { - output = `type,date,maxUsage,meanUsage`; - - Object.keys(storageUsageHistory).forEach((type) => { - storageUsageHistory[type].map((data) => { - let d = new Date(data.date); - output += - "\n" + - type.toString() + - "," + - d.toString() + - "," + - data.maxUsage.toString() + - "," + - data.meanUsage.toString(); - }); - }); - } - - if (type === "JSON") { - Object.keys(storageUsageHistory).forEach((type) => { - storageUsageHistory[type].map((data) => { - let d = new Date(data.date); - data.date = d; - }); - }); - output = [storageUsageHistory]; - output = JSON.stringify(output); - } - - //verify type to avoid user input - if (type !== "JSON" && type !== "CSV") { - throw Error("Invalid file type"); - } - - const filePath = path.join( - __dirname, - "..", - "..", - "tempFiles", - `Tombolo-clusterUsage.${type}` - ); - - const createPromise = fsPromises.writeFile( - filePath, - output, - function (err) { - if (err) { - return console.log(err); - } - } - ); - - await createPromise; - - res.status(200).download(filePath); - } catch (err) { - logger.error(err); - res.status(503).json({ - success: false, - message: "Failed to fetch current cluster usage", - }); - } - } -); -//method for removing file after download on front-end -router.delete("/clusterStorageHistory/file/:type", async (req, res) => { - try { - const { type } = req.params; - - //verify type to avoid user input - if (type !== "JSON" && type !== "CSV") { - throw Error("Invalid file type"); - } - const filePath = path.join( - __dirname, - "..", - "..", - "tempFiles", - `Tombolo-clusterUsage.${type}` - ); - - const createPromise = fsPromises.unlink(filePath); 
-    await createPromise;
-
-    res.status(200).json({ message: "File Deleted" });
-  } catch (error) {
-    res.status(500).json({ message: "Failed to delete file" });
-  }
-});
-
-module.exports = router;
diff --git a/Tombolo/server/routes/clusterRoutes.js b/Tombolo/server/routes/clusterRoutes.js
index 3b40237d2..33665d8fd 100644
--- a/Tombolo/server/routes/clusterRoutes.js
+++ b/Tombolo/server/routes/clusterRoutes.js
@@ -6,6 +6,7 @@ const {
   validateClusterId,
   validateUpdateClusterInputs,
   validateClusterPingPayload,
+  validateQueryData,
 } = require("../middlewares/clusterMiddleware");
 const {
   addCluster,
@@ -16,9 +17,11 @@ const {
   updateCluster,
   getClusterWhiteList,
   pingCluster,
+  clusterUsage,
+  clusterStorageHistory,
 } = require("../controllers/clusterController");
 
-router.post("/ping", validateClusterPingPayload, pingCluster); // GET - Ping cluster
+router.post("/ping", validateClusterPingPayload, pingCluster); // POST - Ping cluster
 router.get("/whiteList", getClusterWhiteList); // GET - cluster white list
 router.post("/", validateAddClusterInputs, addCluster); // CREATE - one cluster
 router.post(
@@ -31,5 +34,12 @@ router.get("/:id", validateClusterId, getCluster); // GET - one cluster by id
 router.delete("/:id", validateClusterId, deleteCluster); // DELETE - one cluster by id
 router.patch("/:id", validateUpdateClusterInputs, updateCluster); // UPDATE - one cluster by id
 
+// Cluster dashboards
+router.get("/currentClusterUsage/:id", validateClusterId, clusterUsage);
+router.get(
+  "/clusterStorageHistory/:queryData",
+  validateQueryData,
+  clusterStorageHistory
+);
 
-module.exports = router;
\ No newline at end of file
+module.exports = router;
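
For reference, a minimal sketch of how a client component might call the two relocated cluster routes, assuming the fetch/authHeader()/handleError() conventions shown in CurrentClusterUsageCharts.jsx and clusterUsage/index.js above; the helper names, the import path, and the `headers: authHeader()` request shape are illustrative assumptions, not code from this change.

// Illustrative sketch only: helper names are hypothetical; the routes and the
// handleError() early-return pattern mirror the dashboard components in this patch.
// Adjust the import path to wherever the component lives relative to src/components/common.
import { authHeader, handleError } from '../../../common/AuthHeader.js';

// GET /api/cluster/currentClusterUsage/:id
export const fetchCurrentClusterUsage = async (clusterId) => {
  const payload = { method: 'GET', headers: authHeader() };
  const response = await fetch(`/api/cluster/currentClusterUsage/${clusterId}`, payload);
  if (!response.ok) {
    handleError(response); // may throw (422) or display a message; either way stop here
    return null;
  }
  return await response.json();
};

// GET /api/cluster/clusterStorageHistory/:queryData
// The middleware validates :queryData as a string and the controller JSON.parses it,
// so the query object is serialized into the path segment (mirroring the component code,
// which does not URL-encode the segment).
export const fetchClusterStorageHistory = async (clusterId, historyDateRange) => {
  const payload = { method: 'GET', headers: authHeader() };
  const queryData = JSON.stringify({ clusterId, historyDateRange });
  const response = await fetch(`/api/cluster/clusterStorageHistory/${queryData}`, payload);
  if (!response.ok) {
    handleError(response);
    return null;
  }
  return await response.json();
};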