Revision be1ddd09 lib/rapi/rlib2.py

--- a/lib/rapi/rlib2.py
+++ b/lib/rapi/rlib2.py
 from ganeti import rapi
 from ganeti import ht
 from ganeti import compat
+from ganeti import ssconf
 from ganeti.rapi import baserlib

......
   """/2/info resource.

   """
-  @staticmethod
-  def GET():
+  def GET(self):
     """Returns cluster information.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()
     return client.QueryClusterInfo()

......
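Note on the hunk above: the /2/info handler (and the handlers in the following hunks) stops calling the module-level baserlib.GetClient() helper and instead goes through an instance method on the resource object. The baserlib side of this revision is not part of this diff; a minimal sketch of what such a method on baserlib.R_Generic could look like, assuming it simply wraps the existing client factory, is:

    # Sketch only -- not part of this diff; _CreateLuxiClient is a made-up
    # stand-in for whatever baserlib actually uses to build the LUXI client.
    def _CreateLuxiClient():
      raise NotImplementedError("illustrative stub")

    class R_Generic(object):
      def GetClient(self):
        """Return a LUXI client; tests and subclasses can override this."""
        return _CreateLuxiClient()

Routing client creation through the instance makes it possible to stub the client per resource (for example in unit tests) instead of monkey-patching the baserlib module.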
   """/2/os resource.

   """
-  @staticmethod
-  def GET():
+  def GET(self):
     """Return a list of all OSes.

     Can return error 500 in case of a problem.
......
     Example: ["debian-etch"]

     """
-    cl = baserlib.GetClient()
+    cl = self.GetClient()
     op = opcodes.OpOsDiagnose(output_fields=["name", "variants"], names=[])
-    job_id = baserlib.SubmitJob([op], cl)
+    job_id = self.SubmitJob([op], cl=cl)
     # we use custom feedback function, instead of print we log the status
     result = cli.PollJob(job_id, cl, feedback_fn=baserlib.FeedbackFn)
     diagnose_data = result[0]
......
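The same pattern applies to job submission: baserlib.SubmitJob([op], cl) above becomes self.SubmitJob([op], cl=cl), and the plain baserlib.SubmitJob([op]) calls in the hunks below become self.SubmitJob([op]). A hedged sketch of such a wrapper, assuming it keeps the old helper's behaviour and merely defaults the client to self.GetClient():

    # Sketch only -- not part of this diff.
    def SubmitJob(self, op, cl=None):
      """Submit a list of opcodes as one job and return the job id."""
      if cl is None:
        cl = self.GetClient()
      return cl.SubmitJob(op)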
   """/2/redistribute-config resource.

   """
-  @staticmethod
-  def PUT():
+  def PUT(self):
     """Redistribute configuration to all nodes.

     """
-    return baserlib.SubmitJob([opcodes.OpClusterRedistConf()])
+    return self.SubmitJob([opcodes.OpClusterRedistConf()])


 class R_2_cluster_modify(baserlib.R_Generic):
......
     op = baserlib.FillOpcode(opcodes.OpClusterSetParams, self.request_body,
                              None)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_jobs(baserlib.R_Generic):
......
     @return: a dictionary with jobs id and uri.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()

     if self.useBulk():
       bulkdata = client.QueryJobs(None, J_FIELDS_BULK)
......

     """
     job_id = self.items[0]
-    result = baserlib.GetClient().QueryJobs([job_id, ], J_FIELDS)[0]
+    result = self.GetClient().QueryJobs([job_id, ], J_FIELDS)[0]
     if result is None:
       raise http.HttpNotFound()
     return baserlib.MapFields(J_FIELDS, result)
......

     """
     job_id = self.items[0]
-    result = baserlib.GetClient().CancelJob(job_id)
+    result = self.GetClient().CancelJob(job_id)
     return result

......
       raise http.HttpBadRequest("The 'previous_log_serial' parameter should"
                                 " be a number")

-    client = baserlib.GetClient()
+    client = self.GetClient()
     result = client.WaitForJobChangeOnce(job_id, fields,
                                          prev_job_info, prev_log_serial,
                                          timeout=_WFJC_TIMEOUT)
......
     """Returns a list of all nodes.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()

     if self.useBulk():
       bulkdata = client.QueryNodes([], N_FIELDS, False)
......

     """
     node_name = self.items[0]
-    client = baserlib.GetClient()
+    client = self.GetClient()

     result = baserlib.HandleItemQueryErrors(client.QueryNodes,
                                             names=[node_name], fields=N_FIELDS,
......

     """
     node_name = self.items[0]
-    client = baserlib.GetClient()
+    client = self.GetClient()
     result = client.QueryNodes(names=[node_name], fields=["role"],
                                use_locking=self.useLocking())

......
                                  drained=drained,
                                  force=bool(self.useForce()))

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_nodes_name_evacuate(baserlib.R_Generic):
......
       "dry_run": self.dryRun(),
       })

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_nodes_name_migrate(baserlib.R_Generic):
......
       "node_name": node_name,
       })

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_nodes_name_storage(baserlib.R_Generic):
......
     op = opcodes.OpNodeQueryStorage(nodes=[node_name],
                                     storage_type=storage_type,
                                     output_fields=output_fields.split(","))
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_nodes_name_storage_modify(baserlib.R_Generic):
......
                                      storage_type=storage_type,
                                      name=name,
                                      changes=changes)
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_nodes_name_storage_repair(baserlib.R_Generic):
......
     op = opcodes.OpRepairNodeStorage(node_name=node_name,
                                      storage_type=storage_type,
                                      name=name)
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseCreateGroupRequest(data, dry_run):
......
     """Returns a list of all node groups.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()

     if self.useBulk():
       bulkdata = client.QueryGroups([], G_FIELDS, False)
......
     """
     baserlib.CheckType(self.request_body, dict, "Body contents")
     op = _ParseCreateGroupRequest(self.request_body, self.dryRun())
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_groups_name(baserlib.R_Generic):
......

     """
     group_name = self.items[0]
-    client = baserlib.GetClient()
+    client = self.GetClient()

     result = baserlib.HandleItemQueryErrors(client.QueryGroups,
                                             names=[group_name], fields=G_FIELDS,
......
     op = opcodes.OpGroupRemove(group_name=self.items[0],
                                dry_run=bool(self.dryRun()))

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseModifyGroupRequest(name, data):
......

     op = _ParseModifyGroupRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseRenameGroupRequest(name, data, dry_run):
......
     baserlib.CheckType(self.request_body, dict, "Body contents")
     op = _ParseRenameGroupRequest(self.items[0], self.request_body,
                                   self.dryRun())
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_groups_name_assign_nodes(baserlib.R_Generic):
......
       "force": self.useForce(),
       })

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseInstanceCreateRequestVersion1(data, dry_run):
......
     """Returns a list of all available instances.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()

     use_locking = self.useLocking()
     if self.useBulk():
......
       raise http.HttpBadRequest("Unsupported request data version %s" %
                                 data_version)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name(baserlib.R_Generic):
......
     """Send information about an instance.

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()
     instance_name = self.items[0]

     result = baserlib.HandleItemQueryErrors(client.QueryInstances,
......
     op = opcodes.OpInstanceRemove(instance_name=self.items[0],
                                   ignore_failures=False,
                                   dry_run=bool(self.dryRun()))
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_info(baserlib.R_Generic):
......

     op = opcodes.OpInstanceQueryData(instances=[instance_name],
                                      static=static)
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_reboot(baserlib.R_Generic):
......
                                   ignore_secondaries=ignore_secondaries,
                                   dry_run=bool(self.dryRun()))

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_startup(baserlib.R_Generic):
......
                                    dry_run=bool(self.dryRun()),
                                    no_remember=no_remember)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseShutdownInstanceRequest(name, data, dry_run, no_remember):
......
     op = _ParseShutdownInstanceRequest(self.items[0], self.request_body,
                                        bool(self.dryRun()), no_remember)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseInstanceReinstallRequest(name, data):
......

     ops = _ParseInstanceReinstallRequest(self.items[0], body)

-    return baserlib.SubmitJob(ops)
+    return self.SubmitJob(ops)


 def _ParseInstanceReplaceDisksRequest(name, data):
......
     """
     op = _ParseInstanceReplaceDisksRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_activate_disks(baserlib.R_Generic):
......
     op = opcodes.OpInstanceActivateDisks(instance_name=instance_name,
                                          ignore_size=ignore_size)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_deactivate_disks(baserlib.R_Generic):
......

     op = opcodes.OpInstanceDeactivateDisks(instance_name=instance_name)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_prepare_export(baserlib.R_Generic):
......
     op = opcodes.OpBackupPrepare(instance_name=instance_name,
                                  mode=mode)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseExportInstanceRequest(name, data):
......

     op = _ParseExportInstanceRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseMigrateInstanceRequest(name, data):
......

     op = _ParseMigrateInstanceRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_failover(baserlib.R_Generic):
......
       "instance_name": self.items[0],
       })

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseRenameInstanceRequest(name, data):
......

     op = _ParseRenameInstanceRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 def _ParseModifyInstanceRequest(name, data):
......

     op = _ParseModifyInstanceRequest(self.items[0], self.request_body)

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_disk_grow(baserlib.R_Generic):
......
       "disk": int(self.items[1]),
       })

-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_console(baserlib.R_Generic):
......
              L{objects.InstanceConsole}

     """
-    client = baserlib.GetClient()
+    client = self.GetClient()

     ((console, ), ) = client.QueryInstances([self.items[0]], ["console"], False)

......
   GET_ACCESS = [rapi.RAPI_ACCESS_WRITE]

   def _Query(self, fields, filter_):
-    return baserlib.GetClient().Query(self.items[0], fields, filter_).ToDict()
+    return self.GetClient().Query(self.items[0], fields, filter_).ToDict()

   def GET(self):
     """Returns resource information.
......
     else:
       fields = _SplitQueryFields(raw_fields[0])

-    return baserlib.GetClient().QueryFields(self.items[0], fields).ToDict()
+    return self.GetClient().QueryFields(self.items[0], fields).ToDict()


 class _R_Tags(baserlib.R_Generic):
......
       if not self.name:
         raise http.HttpBadRequest("Missing name on tag request")

-      cl = baserlib.GetClient()
+      cl = self.GetClient()
       if kind == constants.TAG_INSTANCE:
         fn = cl.QueryInstances
       elif kind == constants.TAG_NODEGROUP:
......
                                 " the 'tag' parameter")
     op = opcodes.OpTagsSet(kind=self.TAG_LEVEL, name=self.name,
                            tags=self.queryargs["tag"], dry_run=self.dryRun())
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])

   def DELETE(self):
     """Delete a tag.
......
                                 " tag(s) using the 'tag' parameter")
     op = opcodes.OpTagsDel(kind=self.TAG_LEVEL, name=self.name,
                            tags=self.queryargs["tag"], dry_run=self.dryRun())
-    return baserlib.SubmitJob([op])
+    return self.SubmitJob([op])


 class R_2_instances_name_tags(_R_Tags):
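Taken together, after this revision every handler in rlib2.py reaches LUXI through the resource instance rather than through module-level helpers. As a rough illustration of the resulting pattern (R_2_example is a made-up resource; the opcode and client calls are the ones used elsewhere in this file):

    # Illustration only -- not a real resource in this revision.
    class R_2_example(baserlib.R_Generic):
      """Hypothetical /2/example resource."""

      def GET(self):
        # read-only handlers query through a LUXI client
        client = self.GetClient()
        return client.QueryClusterInfo()

      def PUT(self):
        # mutating handlers submit a job and return its id
        op = opcodes.OpClusterRedistConf()
        return self.SubmitJob([op])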
