import Network.Curl.Types ()
#endif
import Control.Monad
-import Text.JSON (JSObject, JSValue, fromJSObject, decodeStrict)
+import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
+import qualified Ganeti.Constants as C
-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)
#else
--- | The curl options we use
+-- | The curl options we use.
curlOpts :: [CurlOption]
curlOpts = [ CurlSSLVerifyPeer False
, CurlSSLVerifyHost 0
formatHost :: String -> String
formatHost master =
if ':' `elem` master then master
- else "https://" ++ master ++ ":5080"
+ else "https://" ++ master ++ ":" ++ show C.defaultRapiPort
-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
getGroups body = loadJSArray "Parsing group data" body >>=
mapM (parseGroup . fromJSObject)
+-- | Generates a fake group list.
getFakeGroups :: Result [(String, Group.Group)]
getFakeGroups =
return [(defaultGroupID,
-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
- -> [(String, JSValue)]
+ -> JSRecord
-> Result (String, Instance.Instance)
parseInstance ktn a = do
name <- tryFromObj "Parsing new instance" a "name"
- let owner_name = "Instance '" ++ name ++ "'"
+ let owner_name = "Instance '" ++ name ++ "', error while parsing data"
let extract s x = tryFromObj owner_name x s
disk <- extract "disk_usage" a
beparams <- liftM fromJSObject (extract "beparams" a)
return (name, inst)
-- | Construct a node from a JSON object.
-parseNode :: NameAssoc -> [(String, JSValue)] -> Result (String, Node.Node)
+parseNode :: NameAssoc -> JSRecord -> Result (String, Node.Node)
parseNode ktg a = do
name <- tryFromObj "Parsing new node" a "name"
- let desc = "Node '" ++ name ++ "'"
+ let desc = "Node '" ++ name ++ "', error while parsing data"
extract s = tryFromObj desc a s
offline <- extract "offline"
drained <- extract "drained"
return (name, node)
-- | Construct a group from a JSON object.
-parseGroup :: [(String, JSValue)] -> Result (String, Group.Group)
+parseGroup :: JSRecord -> Result (String, Group.Group)
parseGroup a = do
name <- tryFromObj "Parsing new group" a "name"
let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
tags_body <- getUrl $ printf "%s/2/tags" url
return (group_body, node_body, inst_body, tags_body)
--- | Builds the cluster data from the raw Rapi content
+-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
-> Result ClusterData
parseData (group_body, node_body, inst_body, tags_body) = do
tags_data <- tags_body >>= (fromJResult "Parsing tags data" . decodeStrict)
return (ClusterData group_idx node_idx inst_idx tags_data)
--- | Top level function for data loading
+-- | Top level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source
-> IO (Result ClusterData)
loadData = fmap parseData . readData