{-
-Copyright (C) 2009, 2010, 2011 Google Inc.
+Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
, parseData
) where
+import Control.Exception
+import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
#ifndef NO_CURL
import Network.Curl
import Network.Curl.Types ()
#endif
import Control.Monad
+import Prelude hiding (catch)
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
+import System.FilePath
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
{-# ANN module "HLint: ignore Eta reduce" #-}
-- | Prefix that marks a data source argument as a local filesystem
-- path rather than an HTTP(S) URL.
filePrefix :: String
filePrefix = "file://"
+
-- | Read an URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)
url (show code))
#endif
-- | Helper to convert I/O errors into 'Bad' values.
--
-- Any 'IOException' raised while running the action is rendered via
-- 'show' and wrapped in 'Bad'; a successful result is wrapped in 'Ok'.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction = do
  outcome <- try ioaction
  return $ case outcome of
             Left err  -> Bad . show $ (err :: IOException)
             Right val -> Ok val
+
-- | Append the default port if not passed in.
formatHost :: String -> String
formatHost master =
tags <- extract "tags" a
auto_balance <- extract "auto_balance" beparams
dt <- extract "disk_template" a
+ su <- extract "spindle_use" beparams
let inst = Instance.create name mem disk vcpus running tags
- auto_balance pnode snode dt
+ auto_balance pnode snode dt su
return (name, inst)
-- | Construct a node from a JSON object.
drained <- extract "drained"
vm_cap <- annotateResult desc $ maybeFromObj a "vm_capable"
let vm_cap' = fromMaybe True vm_cap
+ ndparams <- extract "ndparams" >>= asJSObject
+ spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
guuid <- annotateResult desc $ maybeFromObj a "group.uuid"
guuid' <- lookupGroup ktg name (fromMaybe defaultGroupID guuid)
node <- if offline || drained || not vm_cap'
- then return $ Node.create name 0 0 0 0 0 0 True guuid'
+ then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
else do
mtotal <- extract "mtotal"
mnode <- extract "mnode"
dfree <- extract "dfree"
ctotal <- extract "ctotal"
return $ Node.create name mtotal mnode mfree
- dtotal dfree ctotal False guuid'
+ dtotal dfree ctotal False spindles guuid'
return (name, node)
-- | Construct a group from a JSON object.
return (tags, ipolicy)
-- | Loads the raw cluster data from an URL.
-readData :: String -- ^ Cluster or URL to use as source
- -> IO (Result String, Result String, Result String, Result String)
-readData master = do
+readDataHttp :: String -- ^ Cluster or URL to use as source
+ -> IO (Result String, Result String, Result String, Result String)
+readDataHttp master = do
let url = formatHost master
group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
info_body <- getUrl $ printf "%s/2/info" url
return (group_body, node_body, inst_body, info_body)
-- | Loads the raw cluster data from the filesystem.
--
-- Each of the four JSON files is read strictly so that any read error
-- surfaces while still inside 'ioErrToResult' and is therefore
-- reported as a 'Bad' value; with plain lazy 'readFile' the contents
-- are only read when consumed, after the exception handler has
-- already returned.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- loadJson "groups.json"
  node_body  <- loadJson "nodes.json"
  inst_body  <- loadJson "instances.json"
  info_body  <- loadJson "info.json"
  return (group_body, node_body, inst_body, info_body)
  where
    -- Read one file under 'path', forcing the whole contents so lazy
    -- I/O errors are caught here and converted to 'Bad'.
    loadJson fname = ioErrToResult $ do
      contents <- readFile (path </> fname)
      length contents `seq` return contents
+
-- | Loads data via either 'readDataFile' or 'readDataHttp'.
--
-- A source starting with @file://@ is treated as a local directory
-- path (the prefix is stripped before dispatch); anything else is
-- passed through to the HTTP loader as a cluster address/URL.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  -- Note: no 'do' needed — the body is a single expression
  -- (HLint: "Redundant do").
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url
+
-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
-> Result ClusterData