src/Ganeti/HTools/Backend/Rapi.hs @ bf028b21

{-| Implementation of the RAPI client interface.

-}

{-

Copyright (C) 2009, 2010, 2011, 2012, 2013 Google Inc.

This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful, but
WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
General Public License for more details.

You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.

-}

{-# LANGUAGE BangPatterns, CPP #-}

module Ganeti.HTools.Backend.Rapi
  ( loadData
  , parseData
  ) where

import Control.Exception
import Data.List (isPrefixOf)
import Data.Maybe (fromMaybe)
import Network.Curl
import Network.Curl.Types ()
import Control.Monad
import Text.JSON (JSObject, fromJSObject, decodeStrict)
import Text.JSON.Types (JSValue(..))
import Text.Printf (printf)
import System.FilePath

import Ganeti.BasicTypes
import Ganeti.HTools.Loader
import Ganeti.HTools.Types
import Ganeti.JSON
import qualified Ganeti.HTools.Group as Group
import qualified Ganeti.HTools.Node as Node
import qualified Ganeti.HTools.Instance as Instance
import qualified Ganeti.Constants as C

{-# ANN module "HLint: ignore Eta reduce" #-}

-- | File method prefix.
filePrefix :: String
filePrefix = "file://"

-- | Read a URL via curl and return the body if successful.
getUrl :: (Monad m) => String -> IO (m String)

-- | Connection timeout (when using non-file methods).
connTimeout :: Long
connTimeout = 15

-- | The default timeout for queries (when using non-file methods).
queryTimeout :: Long
queryTimeout = 60

-- | The curl options we use.
curlOpts :: [CurlOption]
curlOpts = [ CurlSSLVerifyPeer False
           , CurlSSLVerifyHost 0
           , CurlTimeout queryTimeout
           , CurlConnectTimeout connTimeout
           ]

getUrl url = do
  (code, !body) <- curlGetString url curlOpts
  return (case code of
            CurlOK -> return body
            _ -> fail $ printf "Curl error for '%s', error %s"
                 url (show code))
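
-- A rough usage sketch (not part of the original call sites): the caller
-- chooses the inner monad, e.g. 'Result' from Ganeti.BasicTypes as used by
-- 'readDataHttp', so a curl failure shows up as a 'Bad' value:
--
-- > body <- getUrl "https://master.example.com/2/info" :: IO (Result String)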

-- | Helper to convert I/O errors into 'Bad' values.
ioErrToResult :: IO a -> IO (Result a)
ioErrToResult ioaction =
  Control.Exception.catch (liftM Ok ioaction)
    (\e -> return . Bad . show $ (e::IOException))
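
-- For example (path is illustrative): @ioErrToResult (readFile "/does/not/exist")@
-- returns a 'Bad' value carrying the exception text instead of letting the
-- 'IOException' propagate.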

-- | Prepend the scheme and append the default RAPI port if no port was
-- passed in.
formatHost :: String -> String
formatHost master =
  if ':' `elem` master
    then master
    else "https://" ++ master ++ ":" ++ show C.defaultRapiPort

-- | Parse an instance list in JSON format.
getInstances :: NameAssoc
             -> String
             -> Result [(String, Instance.Instance)]
getInstances ktn body =
  loadJSArray "Parsing instance data" body >>=
  mapM (parseInstance ktn . fromJSObject)

-- | Parse a node list in JSON format.
getNodes :: NameAssoc -> String -> Result [(String, Node.Node)]
getNodes ktg body = loadJSArray "Parsing node data" body >>=
                    mapM (parseNode ktg . fromJSObject)

-- | Parse a group list in JSON format.
getGroups :: String -> Result [(String, Group.Group)]
getGroups body = loadJSArray "Parsing group data" body >>=
                 mapM (parseGroup . fromJSObject)

-- | Construct an instance from a JSON object.
parseInstance :: NameAssoc
              -> JSRecord
              -> Result (String, Instance.Instance)
parseInstance ktn a = do
  name <- tryFromObj "Parsing new instance" a "name"
  let owner_name = "Instance '" ++ name ++ "', error while parsing data"
  let extract s x = tryFromObj owner_name x s
  disk <- extract "disk_usage" a
  disks <- extract "disk.sizes" a
  beparams <- liftM fromJSObject (extract "beparams" a)
  omem <- extract "oper_ram" a
  -- Prefer the runtime memory (oper_ram) when it is a number; otherwise
  -- fall back to the configured beparams memory/maxmem.
  mem <- case omem of
           JSRational _ _ -> annotateResult owner_name (fromJVal omem)
           _ -> extract "memory" beparams `mplus` extract "maxmem" beparams
  vcpus <- extract "vcpus" beparams
  pnode <- extract "pnode" a >>= lookupNode ktn name
  snodes <- extract "snodes" a
  snode <- case snodes of
             [] -> return Node.noSecondary
             x:_ -> readEitherString x >>= lookupNode ktn name
  running <- extract "status" a
  tags <- extract "tags" a
  auto_balance <- extract "auto_balance" beparams
  dt <- extract "disk_template" a
  su <- extract "spindle_use" beparams
  let inst = Instance.create name mem disk disks vcpus running tags
             auto_balance pnode snode dt su
  return (name, inst)
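
-- A minimal sketch of the bulk instance JSON this parser expects; the field
-- values are illustrative only, the keys are the ones extracted above:
--
-- > { "name": "inst1.example.com", "disk_usage": 10240,
-- >   "disk.sizes": [10240], "oper_ram": 1024,
-- >   "beparams": { "vcpus": 1, "auto_balance": true, "spindle_use": 1 },
-- >   "pnode": "node1.example.com", "snodes": [], "status": "running",
-- >   "tags": [], "disk_template": "plain" }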

-- | Construct a node from a JSON object.
parseNode :: NameAssoc -> JSRecord -> Result (String, Node.Node)
parseNode ktg a = do
  name <- tryFromObj "Parsing new node" a "name"
  let desc = "Node '" ++ name ++ "', error while parsing data"
      extract s = tryFromObj desc a s
  offline <- extract "offline"
  drained <- extract "drained"
  vm_cap  <- annotateResult desc $ maybeFromObj a "vm_capable"
  let vm_cap' = fromMaybe True vm_cap
  ndparams <- extract "ndparams" >>= asJSObject
  spindles <- tryFromObj desc (fromJSObject ndparams) "spindle_count"
  guuid   <- annotateResult desc $ maybeFromObj a "group.uuid"
  guuid' <- lookupGroup ktg name (fromMaybe defaultGroupID guuid)
  -- Offline, drained or non-vm_capable nodes contribute no resources, so
  -- they are created as offline placeholder nodes with zeroed values.
  node <- if offline || drained || not vm_cap'
            then return $ Node.create name 0 0 0 0 0 0 True 0 guuid'
            else do
              mtotal  <- extract "mtotal"
              mnode   <- extract "mnode"
              mfree   <- extract "mfree"
              dtotal  <- extract "dtotal"
              dfree   <- extract "dfree"
              ctotal  <- extract "ctotal"
              return $ Node.create name mtotal mnode mfree
                     dtotal dfree ctotal False spindles guuid'
  return (name, node)

-- | Construct a group from a JSON object.
parseGroup :: JSRecord -> Result (String, Group.Group)
parseGroup a = do
  name <- tryFromObj "Parsing new group" a "name"
  let extract s = tryFromObj ("Group '" ++ name ++ "'") a s
  uuid <- extract "uuid"
  apol <- extract "alloc_policy"
  ipol <- extract "ipolicy"
  tags <- extract "tags"
  return (uuid, Group.create name uuid apol ipol tags)
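
-- Note that, unlike nodes and instances, the association key returned here is
-- the group's UUID rather than its name, since 'parseNode' resolves the
-- "group.uuid" field against this mapping.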

-- | Parse cluster data from the info resource.
parseCluster :: JSObject JSValue -> Result ([String], IPolicy, String)
parseCluster obj = do
  let obj' = fromJSObject obj
      extract s = tryFromObj "Parsing cluster data" obj' s
  master <- extract "master"
  tags <- extract "tags"
  ipolicy <- extract "ipolicy"
  return (tags, ipolicy, master)

-- | Loads the raw cluster data from a URL.
readDataHttp :: String -- ^ Cluster or URL to use as source
             -> IO (Result String, Result String, Result String, Result String)
readDataHttp master = do
  let url = formatHost master
  group_body <- getUrl $ printf "%s/2/groups?bulk=1" url
  node_body <- getUrl $ printf "%s/2/nodes?bulk=1" url
  inst_body <- getUrl $ printf "%s/2/instances?bulk=1" url
  info_body <- getUrl $ printf "%s/2/info" url
  return (group_body, node_body, inst_body, info_body)
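
-- The @bulk=1@ query parameter makes RAPI return the full object details in a
-- single response, so no per-object follow-up requests are needed.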

-- | Loads the raw cluster data from the filesystem.
readDataFile :: String -- ^ Path to the directory containing the files
             -> IO (Result String, Result String, Result String, Result String)
readDataFile path = do
  group_body <- ioErrToResult . readFile $ path </> "groups.json"
  node_body <- ioErrToResult . readFile $ path </> "nodes.json"
  inst_body <- ioErrToResult . readFile $ path </> "instances.json"
  info_body <- ioErrToResult . readFile $ path </> "info.json"
  return (group_body, node_body, inst_body, info_body)

-- | Loads data via either 'readDataFile' or 'readDataHttp'.
readData :: String -- ^ URL to use as source
         -> IO (Result String, Result String, Result String, Result String)
readData url =
  if filePrefix `isPrefixOf` url
    then readDataFile (drop (length filePrefix) url)
    else readDataHttp url
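
-- For example (paths illustrative): @readData "file:///tmp/cluster-dump"@
-- reads @groups.json@, @nodes.json@, @instances.json@ and @info.json@ from
-- that directory, while @readData "master.example.com"@ goes through
-- 'formatHost' and queries the live RAPI endpoints.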

-- | Builds the cluster data from the raw Rapi content.
parseData :: (Result String, Result String, Result String, Result String)
          -> Result ClusterData
parseData (group_body, node_body, inst_body, info_body) = do
  -- Groups must be parsed first, since nodes reference group UUIDs and
  -- instances reference node names.
  group_data <- group_body >>= getGroups
  let (group_names, group_idx) = assignIndices group_data
  node_data <- node_body >>= getNodes group_names
  let (node_names, node_idx) = assignIndices node_data
  inst_data <- inst_body >>= getInstances node_names
  let (_, inst_idx) = assignIndices inst_data
  (tags, ipolicy, master) <-
    info_body >>=
    (fromJResult "Parsing cluster info" . decodeStrict) >>=
    parseCluster
  node_idx' <- setMaster node_names node_idx master
  return (ClusterData group_idx node_idx' inst_idx tags ipolicy)

-- | Top level function for data loading.
loadData :: String -- ^ Cluster or URL to use as source
         -> IO (Result ClusterData)
loadData = fmap parseData . readData
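
-- A minimal usage sketch (hypothetical caller, not part of this module):
--
-- > import Ganeti.BasicTypes (Result(..))
-- >
-- > main :: IO ()
-- > main = do
-- >   res <- loadData "file:///var/lib/ganeti/rapi-dump"  -- or a master name
-- >   case res of
-- >     Ok _cdata -> putStrLn "cluster data loaded"
-- >     Bad msg   -> putStrLn ("loading failed: " ++ msg)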