{-| Implementation of the RAPI client interface.

-}

{-

Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.

-}

{-# LANGUAGE BangPatterns, CPP #-}

module Ganeti.HTools.Backend.Rapi
  ( loadData
  , parseData
  ) where

import Control.Exception
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Network.Curl
import Network.Curl.Types ()
import Control.Monad
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import System.FilePath

import Ganeti.BasicTypes
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.Constants as C

{-# ANN module "HLint: ignore Eta reduce" #-}

-- | File method prefix.
filePrefix :: String
filePrefix = "file://"

-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)

-- | Connection timeout (when using non-file methods).
connTimeout :: Long
connTimeout = 15

-- | The default timeout for queries (when using non-file methods).
queryTimeout :: Long
queryTimeout = 60

-- | The curl options we use.
curlOpts :: [CurlOption]
curlOpts = [ CurlSSLVerifyPeer False
           , CurlSSLVerifyHost 0
           , CurlTimeout queryTimeout
           , CurlConnectTimeout connTimeout
           ]

getUrl url = do
  (code, !body) <- curlGetString url curlOpts
  return (case code of
            CurlOK -> return body
            _ -> fail $ printf "Curl error for '%s', error %s"
                 url (show code))
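
-- Usage sketch (not part of the original module): the polymorphic result
-- lets the caller choose the inner monad; in this backend it is used at
-- 'Result', so a failed transfer becomes a 'Bad' value instead of an
-- exception.  The host below is a hypothetical example.
--
-- > body <- getUrl "https://cluster.example.com:5080/2/info"
-- >           :: IO (Result String)
-- > case body of
-- >   Ok s    -> putStrLn ("fetched " ++ show (length s) ++ " bytes")
-- >   Bad msg -> putStrLn ("RAPI query failed: " ++ msg)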

-- | Helper to convert I/O errors into 'Bad' values.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction =
  Control.Exception.catch (liftM Ok ioaction)
    (\e -> return . Bad . show $ (e::IOException))
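
-- Usage sketch (hypothetical path): an I/O failure such as a missing file
-- is captured and reported as a 'Bad' message rather than escaping as an
-- exception.
--
-- > r <- ioErrToResult (readFile "/no/such/nodes.json")
-- > case r of
-- >   Ok contents -> putStrLn contents
-- >   Bad msg     -> putStrLn ("read failed: " ++ msg)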

-- | Add the default scheme and port if the master address has no
-- explicit port.
formatHost :: String -> String
formatHost master =
  if ':' `elem` master
    then master
    else "https://" ++ master ++ ":" ++ show C.defaultRapiPort

-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> String
             -> Result [(String, Instance.Instance)]
getInstances ktn body =
  loadJSArray "Parsing instance data" body >>=
  mapM (parseInstance ktn . fromJSObject)

-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> String -> Result [(String, Node.Node)]
getNodes ktg body = loadJSArray "Parsing node data" body >>=
                    mapM (parseNode ktg . fromJSObject)

-- | Parse a group list in JSON format.
getGroups :: String -> Result [(String, Group.Group)]
getGroups body = loadJSArray "Parsing group data" body >>=
                 mapM (parseGroup . fromJSObject)

-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
              -> JSRecord
              -> Result (String, Instance.Instance)
parseInstance ktn a = do
  name <- tryFromObj "Parsing new instance" a "name"
  let owner_name = "Instance '" ++ name ++ "', error while parsing data"
  let extract s x = tryFromObj owner_name x s
  disk <- extract "disk_usage" a
  disks <- extract "disk.sizes" a
  beparams <- liftM fromJSObject (extract "beparams" a)
  omem <- extract "oper_ram" a
  mem <- case omem of
           JSRational _ _ -> annotateResult owner_name (fromJVal omem)
           _ -> extract "memory" beparams `mplus` extract "maxmem" beparams
  vcpus <- extract "vcpus" beparams
  pnode <- extract "pnode" a >>= lookupNode ktn name
  snodes <- extract "snodes" a
  snode <- case snodes of
             [] -> return Node.noSecondary
             x:_ -> readEitherString x >>= lookupNode ktn name
  running <- extract "status" a
  tags <- extract "tags" a
  auto_balance <- extract "auto_balance" beparams
  dt <- extract "disk_template" a
  su <- extract "spindle_use" beparams
  let inst = Instance.create name mem disk disks vcpus running tags
             auto_balance pnode snode dt su
  return (name, inst)
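
-- Field sketch (illustrative, abbreviated): the parser reads the bulk
-- instance representation, preferring the live "oper_ram" value when it
-- is numeric and falling back to the "memory"/"maxmem" backend parameters
-- otherwise.  A minimal input object would look roughly like:
--
-- > { "name": "inst1.example.com", "disk_usage": 1024,
-- >   "disk.sizes": [1024], "oper_ram": 512,
-- >   "beparams": { "vcpus": 1, "maxmem": 512,
-- >                 "auto_balance": true, "spindle_use": 1 },
-- >   "pnode": "node1.example.com", "snodes": [], "status": "running",
-- >   "tags": [], "disk_template": "plain" }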

-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> JSRecord -> Result (String, Node.Node)
parseNode ktg a = do
  name <- tryFromObj "Parsing new node" a "name"
  let desc = "Node '" ++ name ++ "', error while parsing data"
      extract s = tryFromObj desc a s
  offline <- extract "offline"
  drained <- extract "drained"
  vm_cap  <- annotateResult desc $ maybeFromObj a "vm_capable"
  let vm_cap' = fromMaybe True vm_cap
  ndparams <- extract "ndparams" >>= asJSObject
  spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
  guuid   <- annotateResult desc $ maybeFromObj a "group.uuid"
  guuid' <-  lookupGroup ktg name (fromMaybe defaultGroupID guuid)
  node <- if offline || drained || not vm_cap'
            then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
            else do
              mtotal  <- extract "mtotal"
              mnode   <- extract "mnode"
              mfree   <- extract "mfree"
              dtotal  <- extract "dtotal"
              dfree   <- extract "dfree"
              ctotal  <- extract "ctotal"
              tags    <- extract "tags"
              return . flip Node.setNodeTags tags $
                Node.create name mtotal mnode mfree dtotal dfree ctotal False
                            spindles guuid'
  return (name, node)
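
-- Note (descriptive, not in the original file): nodes that are offline,
-- drained, or not VM-capable are created with zeroed resources and the
-- offline flag set, so they are excluded from allocation decisions; only
-- online, VM-capable nodes have their memory, disk and CPU totals read
-- from the bulk data.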

-- | Construct a group from a JSON object.
parseGroup :: JSRecord -> Result (String, Group.Group)
parseGroup a = do
  name <- tryFromObj "Parsing new group" a "name"
  let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
  uuid <- extract "uuid"
  apol <- extract "alloc_policy"
  ipol <- extract "ipolicy"
  tags <- extract "tags"
  return (uuid, Group.create name uuid apol ipol tags)
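
-- Field sketch (illustrative, abbreviated): only a handful of fields from
-- the bulk group representation are needed, e.g.
--
-- > { "name": "default", "uuid": "aaaa-bbbb", "alloc_policy": "preferred",
-- >   "ipolicy": { ... }, "tags": [] }
--
-- Note that the resulting association is keyed by the group's UUID, not
-- its name.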

-- | Parse cluster data from the info resource.
parseCluster :: JSObject JSValue -> Result ([String], IPolicy, String)
parseCluster obj = do
  let obj' = fromJSObject obj
      extract s = tryFromObj "Parsing cluster data" obj' s
  master <- extract "master"
  tags <- extract "tags"
  ipolicy <- extract "ipolicy"
  return (tags, ipolicy, master)

-- | Loads the raw cluster data from a URL.
readDataHttp :: String -- ^ Cluster or URL to use as source
             -> IO (Result String, Result String, Result String, Result String)
readDataHttp master = do
  let url = formatHost master
  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
  node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
  inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
  info_body <- getUrl $ printf "%s/2/info" url
  return (group_body, node_body, inst_body, info_body)
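
-- Request sketch (hypothetical master name): for a master argument of
-- "cluster.example.com" the four queries above hit the bulk resources of
-- the version-2 RAPI, roughly
--
-- >   https://cluster.example.com:<defaultRapiPort>/2/groups?bulk=1
-- >   https://cluster.example.com:<defaultRapiPort>/2/nodes?bulk=1
-- >   https://cluster.example.com:<defaultRapiPort>/2/instances?bulk=1
-- >   https://cluster.example.com:<defaultRapiPort>/2/info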

-- | Loads the raw cluster data from the filesystem.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- ioErrToResult . readFile $ path </> "groups.json"
  node_body <- ioErrToResult . readFile $ path </> "nodes.json"
  inst_body <- ioErrToResult . readFile $ path </> "instances.json"
  info_body <- ioErrToResult . readFile $ path </> "info.json"
  return (group_body, node_body, inst_body, info_body)

-- | Loads data via either 'readDataFile' or 'readDataHttp'.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url
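
-- Dispatch sketch (hypothetical arguments): a "file://" prefix selects the
-- filesystem backend, anything else is treated as a cluster address.
--
-- > readData "file:///var/lib/ganeti/rapi-dump"   -- uses 'readDataFile'
-- > readData "cluster.example.com"                -- uses 'readDataHttp'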

-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
          -> Result ClusterData
parseData (group_body, node_body, inst_body, info_body) = do
  group_data <- group_body >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- node_body >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- inst_body >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  (tags, ipolicy, master) <-
    info_body >>=
    (fromJResult "Parsing cluster info" . decodeStrict) >>=
    parseCluster
  node_idx' <- setMaster node_names node_idx master
  return (ClusterData group_idx node_idx' inst_idx tags ipolicy)
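
-- Note (descriptive, not in the original file): the ordering above matters:
-- groups are parsed first so that node parsing can resolve group UUIDs to
-- indices, and nodes are parsed before instances so that primary and
-- secondary node names can be resolved to node indices.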

-- | Top-level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source
         -> IO (Result ClusterData)
loadData = fmap parseData . readData
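
-- Usage sketch (hypothetical sources, not part of the original module):
--
-- > main :: IO ()
-- > main = do
-- >   cdata <- loadData "cluster.example.com"   -- or "file:///some/dir"
-- >   case cdata of
-- >     Ok _    -> putStrLn "cluster data loaded"
-- >     Bad msg -> putStrLn ("loading failed: " ++ msg)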