{-
-Copyright (C) 2009, 2010, 2011 Google Inc.
+Copyright (C) 2009, 2010, 2011, 2012 Google Inc.
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
, parseData
) where
+import Control.Exception
+import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
#ifndef NO_CURL
import Network.Curl
import Network.Curl.Types ()
#endif
import Control.Monad
+import Prelude hiding (catch)
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
+import System.FilePath
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
{-# ANN module "HLint: ignore Eta reduce" #-}
+-- | File method prefix: a source URL starting with this string
+-- selects the local-filesystem backend ('readDataFile') instead of
+-- the HTTP backend.
+filePrefix :: String
+filePrefix = "file://"
+
-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)
url (show code))
#endif
+-- | Helper to convert I/O errors into 'Bad' values.
+--
+-- Runs the given action and wraps its result in 'Ok'; an
+-- 'IOException' raised by the action is caught and returned as a
+-- 'Bad' value carrying the exception's string rendering.
+ioErrToResult :: IO a -> IO (Result a)
+ioErrToResult ioaction =
+  catch (ioaction >>= return . Ok)
+    (\e -> return . Bad . show $ (e::IOException))
+
-- | Append the default port if not passed in.
formatHost :: String -> String
formatHost master =
omem <- extract "oper_ram" a
mem <- case omem of
JSRational _ _ -> annotateResult owner_name (fromJVal omem)
- _ -> extract "memory" beparams
+ _ -> extract "memory" beparams `mplus` extract "maxmem" beparams
vcpus <- extract "vcpus" beparams
pnode <- extract "pnode" a >>= lookupNode ktn name
snodes <- extract "snodes" a
tags <- extract "tags" a
auto_balance <- extract "auto_balance" beparams
dt <- extract "disk_template" a
+ su <- extract "spindle_use" beparams
let inst = Instance.create name mem disk vcpus running tags
- auto_balance pnode snode dt
+ auto_balance pnode snode dt su
return (name, inst)
-- | Construct a node from a JSON object.
drained <- extract "drained"
vm_cap <- annotateResult desc $ maybeFromObj a "vm_capable"
let vm_cap' = fromMaybe True vm_cap
+ ndparams <- extract "ndparams" >>= asJSObject
+ spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
guuid <- annotateResult desc $ maybeFromObj a "group.uuid"
guuid' <- lookupGroup ktg name (fromMaybe defaultGroupID guuid)
node <- if offline || drained || not vm_cap'
- then return $ Node.create name 0 0 0 0 0 0 True guuid'
+ then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
else do
mtotal <- extract "mtotal"
mnode <- extract "mnode"
dfree <- extract "dfree"
ctotal <- extract "ctotal"
return $ Node.create name mtotal mnode mfree
- dtotal dfree ctotal False guuid'
+ dtotal dfree ctotal False spindles guuid'
return (name, node)
-- | Construct a group from a JSON object.
let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
uuid <- extract "uuid"
apol <- extract "alloc_policy"
- return (uuid, Group.create name uuid apol)
+ ipol <- extract "ipolicy"
+ return (uuid, Group.create name uuid apol ipol)
+
+-- | Parse cluster-wide data (tags and the cluster instance policy)
+-- from the JSON body of the info resource.
+parseCluster :: JSObject JSValue -> Result ([String], IPolicy)
+parseCluster obj = do
+  let obj' = fromJSObject obj
+      extract s = tryFromObj "Parsing cluster data" obj' s
+  tags <- extract "tags"
+  ipolicy <- extract "ipolicy"
+  return (tags, ipolicy)
-- | Loads the raw cluster data from a URL over HTTP (renamed from
-- 'readData'; the tags query is replaced by the richer info query).
-readData :: String -- ^ Cluster or URL to use as source
-         -> IO (Result String, Result String, Result String, Result String)
-readData master = do
+readDataHttp :: String -- ^ Cluster or URL to use as source
+             -> IO (Result String, Result String, Result String, Result String)
+readDataHttp master = do
  let url = formatHost master
  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
  node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
  inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
-  tags_body <- getUrl $ printf "%s/2/tags" url
-  return (group_body, node_body, inst_body, tags_body)
+  info_body <- getUrl $ printf "%s/2/info" url
+  return (group_body, node_body, inst_body, info_body)
+
+-- | Loads the raw cluster data from the filesystem.
+--
+-- Reads the four JSON dumps (groups, nodes, instances, cluster info)
+-- from the given directory; I/O failures are converted into 'Bad'
+-- values via 'ioErrToResult' rather than thrown.
+-- NOTE(review): 'readFile' is lazy, so only errors raised at open
+-- time are guaranteed to be caught here; read errors surfacing after
+-- the handler returns may escape — confirm whether strict reading is
+-- wanted.
+readDataFile :: String -- ^ Path to the directory containing the files
+             -> IO (Result String, Result String, Result String, Result String)
+readDataFile path = do
+  group_body <- ioErrToResult $ readFile $ path </> "groups.json"
+  node_body <- ioErrToResult $ readFile $ path </> "nodes.json"
+  inst_body <- ioErrToResult $ readFile $ path </> "instances.json"
+  info_body <- ioErrToResult $ readFile $ path </> "info.json"
+  return (group_body, node_body, inst_body, info_body)
+
+-- | Loads data via either 'readDataFile' (for @file://@ URLs, with
+-- the prefix stripped to obtain the directory path) or
+-- 'readDataHttp' (for everything else).
+readData :: String -- ^ URL to use as source
+         -> IO (Result String, Result String, Result String, Result String)
+readData url =
+  -- note: no 'do' needed, the body is a single expression
+  if filePrefix `isPrefixOf` url
+    then readDataFile (drop (length filePrefix) url)
+    else readDataHttp url
-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
          -> Result ClusterData
-parseData (group_body, node_body, inst_body, tags_body) = do
+parseData (group_body, node_body, inst_body, info_body) = do
  group_data <- group_body >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- node_body >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- inst_body >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
-  tags_data <- tags_body >>= (fromJResult "Parsing tags data" . decodeStrict)
-  return (ClusterData group_idx node_idx inst_idx tags_data)
+  -- the info body replaces the old tags body: it is decoded once and
+  -- then split into cluster tags and instance policy by 'parseCluster'
+  (tags, ipolicy) <- info_body >>=
+                     (fromJResult "Parsing cluster info" . decodeStrict) >>=
+                     parseCluster
+  return (ClusterData group_idx node_idx inst_idx tags ipolicy)
-- | Top level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source