Fix the node powered field
diff --git a/lib/query.py b/lib/query.py
index a8f19f0..d041ad5 100644
--- a/lib/query.py
+++ b/lib/query.py
@@ -64,6 +64,7 @@ from ganeti import objects
 from ganeti import ht
 from ganeti import runtime
 from ganeti import qlang
+from ganeti import jstore
 
 from ganeti.constants import (QFT_UNKNOWN, QFT_TEXT, QFT_BOOL, QFT_NUMBER,
                               QFT_UNIT, QFT_TIMESTAMP, QFT_OTHER,
@@ -100,11 +101,15 @@ from ganeti.constants import (QFT_UNKNOWN, QFT_TEXT, QFT_BOOL, QFT_NUMBER,
  CQ_QUEUE_DRAINED,
  CQ_WATCHER_PAUSE) = range(300, 303)
 
+(JQ_ARCHIVED, ) = range(400, 401)
+
 # Query field flags
 QFF_HOSTNAME = 0x01
 QFF_IP_ADDRESS = 0x02
-# Next values: 0x04, 0x08, 0x10, 0x20, 0x40, 0x80, 0x100, 0x200
-QFF_ALL = (QFF_HOSTNAME | QFF_IP_ADDRESS)
+QFF_JOB_ID = 0x04
+QFF_SPLIT_TIMESTAMP = 0x08
+# Next values: 0x10, 0x20, 0x40, 0x80, 0x100, 0x200
+QFF_ALL = (QFF_HOSTNAME | QFF_IP_ADDRESS | QFF_JOB_ID | QFF_SPLIT_TIMESTAMP)
 
 FIELD_NAME_RE = re.compile(r"^[a-z0-9/._]+$")
 TITLE_RE = re.compile(r"^[^\s]+$")
@@ -142,12 +147,6 @@ _VTToQFT = {
 
 _SERIAL_NO_DOC = "%s object serial number, incremented on each modification"
 
-# TODO: Consider moving titles closer to constants
-NDP_TITLE = {
-  constants.ND_OOB_PROGRAM: "OutOfBandProgram",
-  constants.ND_SPINDLE_COUNT: "SpindleCount",
-  }
-
 
 def _GetUnknownField(ctx, item): # pylint: disable=W0613
   """Gets the contents of an unknown field.
@@ -272,13 +271,16 @@ class _FilterHints:
     if op != qlang.OP_OR:
       self._NeedAllNames()
 
-  def NoteUnaryOp(self, op): # pylint: disable=W0613
+  def NoteUnaryOp(self, op, datakind): # pylint: disable=W0613
     """Called when handling an unary operation.
 
     @type op: string
     @param op: Operator
 
     """
+    if datakind is not None:
+      self._datakinds.add(datakind)
+
     self._NeedAllNames()
 
   def NoteBinaryOp(self, op, datakind, name, value):
@@ -345,6 +347,36 @@ def _PrepareRegex(pattern):
     raise errors.ParameterError("Invalid regex pattern (%s)" % err)
 
 
+def _PrepareSplitTimestamp(value):
+  """Prepares a value for comparison by L{_MakeSplitTimestampComparison}.
+
+  """
+  if ht.TNumber(value):
+    return value
+  else:
+    return utils.MergeTime(value)
+
+
+def _MakeSplitTimestampComparison(fn):
+  """Compares split timestamp values after converting to float.
+
+  """
+  return lambda lhs, rhs: fn(utils.MergeTime(lhs), rhs)
+
+
+def _MakeComparisonChecks(fn):
+  """Prepares flag-specific comparisons using a comparison function.
+
+  """
+  return [
+    (QFF_SPLIT_TIMESTAMP, _MakeSplitTimestampComparison(fn),
+     _PrepareSplitTimestamp),
+    (QFF_JOB_ID, lambda lhs, rhs: fn(jstore.ParseJobId(lhs), rhs),
+     jstore.ParseJobId),
+    (None, fn, None),
+    ]
+
+
 class _FilterCompilerHelper:
   """Converts a query filter to a callable usable for filtering.
 
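Note: the helpers added above let ordering operators compare the two non-plain data kinds this change introduces, namely split timestamps stored as (seconds, microseconds) tuples and job IDs. Below is a minimal standalone sketch of the intended behaviour, not Ganeti code; it only assumes that utils.MergeTime turns a (seconds, microseconds) tuple into float seconds.

import operator

def merge_time(timetuple):
  # Assumed behaviour of utils.MergeTime: (seconds, microseconds) -> float.
  (seconds, microseconds) = timetuple
  return seconds + microseconds * 1e-6

def make_split_timestamp_comparison(fn):
  # Left-hand side is the stored (seconds, microseconds) tuple; the
  # right-hand side was already prepared by _PrepareSplitTimestamp.
  return lambda lhs, rhs: fn(merge_time(lhs), rhs)

lt = make_split_timestamp_comparison(operator.lt)
print(lt((1325376000, 500000), 1325376000.6))  # True
print(lt((1325376000, 700000), 1325376000.6))  # False
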
@@ -363,7 +395,7 @@ class _FilterCompilerHelper:
 
   List of tuples containing flags and a callable receiving the left- and
   right-hand side of the operator. The flags are an OR-ed value of C{QFF_*}
-  (e.g. L{QFF_HOSTNAME}).
+  (e.g. L{QFF_HOSTNAME} or L{QFF_SPLIT_TIMESTAMP}).
 
   Order matters. The first item with flags will be used. Flags are checked
   using binary AND.
@@ -374,6 +406,8 @@ class _FilterCompilerHelper:
      lambda lhs, rhs: utils.MatchNameComponent(rhs, [lhs],
                                                case_sensitive=False),
      None),
+    (QFF_SPLIT_TIMESTAMP, _MakeSplitTimestampComparison(operator.eq),
+     _PrepareSplitTimestamp),
     (None, operator.eq, None),
     ]
 
@@ -403,18 +437,10 @@ class _FilterCompilerHelper:
     qlang.OP_NOT_EQUAL:
       (_OPTYPE_BINARY, [(flags, compat.partial(_WrapNot, fn), valprepfn)
                         for (flags, fn, valprepfn) in _EQUALITY_CHECKS]),
-    qlang.OP_LT: (_OPTYPE_BINARY, [
-      (None, operator.lt, None),
-      ]),
-    qlang.OP_GT: (_OPTYPE_BINARY, [
-      (None, operator.gt, None),
-      ]),
-    qlang.OP_LE: (_OPTYPE_BINARY, [
-      (None, operator.le, None),
-      ]),
-    qlang.OP_GE: (_OPTYPE_BINARY, [
-      (None, operator.ge, None),
-      ]),
+    qlang.OP_LT: (_OPTYPE_BINARY, _MakeComparisonChecks(operator.lt)),
+    qlang.OP_LE: (_OPTYPE_BINARY, _MakeComparisonChecks(operator.le)),
+    qlang.OP_GT: (_OPTYPE_BINARY, _MakeComparisonChecks(operator.gt)),
+    qlang.OP_GE: (_OPTYPE_BINARY, _MakeComparisonChecks(operator.ge)),
     qlang.OP_REGEXP: (_OPTYPE_BINARY, [
       (None, lambda lhs, rhs: rhs.search(lhs), _PrepareRegex),
       ]),
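Note: with _MakeComparisonChecks wired into OP_LT/OP_LE/OP_GT/OP_GE, each operator now maps to a list of (flags, comparison, value-preparation) tuples. As documented earlier in this file, order matters: the first entry whose flags overlap the field's QFF_* flags (binary AND) is used, and the trailing (None, fn, None) entry is the plain fallback. A rough standalone sketch of that selection logic, simplified from what _FilterCompilerHelper is assumed to do:

import operator

QFF_JOB_ID = 0x04
QFF_SPLIT_TIMESTAMP = 0x08

def pick_check(checks, field_flags):
  # Return the first (comparison, value-preparation) pair whose flags
  # overlap the field's flags; a "None" flags entry always matches.
  for (flags, fn, prep_fn) in checks:
    if flags is None or (field_flags & flags):
      return (fn, prep_fn)
  raise AssertionError("Fallback entry missing")

checks = [
  (QFF_SPLIT_TIMESTAMP, "timestamp-aware <", "merge (sec, usec) value"),
  (QFF_JOB_ID, "job-ID-aware <", "parse job ID"),
  (None, operator.lt, None),
  ]

print(pick_check(checks, QFF_SPLIT_TIMESTAMP)[0])  # timestamp-aware <
print(pick_check(checks, 0)[0])                    # plain operator.lt
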
@@ -536,19 +562,22 @@ class _FilterCompilerHelper:
     """
     assert op_fn is None
 
-    if hints_fn:
-      hints_fn(op)
-
     if len(operands) != 1:
       raise errors.ParameterError("Unary operator '%s' expects exactly one"
                                   " operand" % op)
 
     if op == qlang.OP_TRUE:
-      (_, _, _, retrieval_fn) = self._LookupField(operands[0])
+      (_, datakind, _, retrieval_fn) = self._LookupField(operands[0])
+
+      if hints_fn:
+        hints_fn(op, datakind)
 
       op_fn = operator.truth
       arg = retrieval_fn
     elif op == qlang.OP_NOT:
+      if hints_fn:
+        hints_fn(op, None)
+
       op_fn = operator.not_
       arg = self._Compile(operands[0], level + 1)
     else:
@@ -719,6 +748,7 @@ class Query:
         (status, name) = _ProcessResult(self._name_fn(ctx, item))
         assert status == constants.RS_NORMAL
         # TODO: Are there cases where we wouldn't want to use NiceSort?
+        # Answer: if the name field is non-string...
         result.append((utils.NiceSortKey(name), idx, row))
       else:
         result.append(row)
@@ -959,7 +989,9 @@ def _BuildNDFields(is_group):
     field_kind = GQ_CONFIG
   else:
     field_kind = NQ_GROUP
-  return [(_MakeField("ndp/%s" % name, NDP_TITLE.get(name, "ndp/%s" % name),
+  return [(_MakeField("ndp/%s" % name,
+                      constants.NDS_PARAMETER_TITLES.get(name,
+                                                         "ndp/%s" % name),
                       _VTToQFT[kind], "The \"%s\" node parameter" % name),
            field_kind, 0, _GetNDParam(name))
           for name, kind in constants.NDS_PARAMETER_TYPES.items()]
@@ -1277,7 +1309,7 @@ def _BuildNodeFields():
                  constants.NR_REGULAR, constants.NR_DRAINED,
                  constants.NR_OFFLINE)
   role_doc = ("Node role; \"%s\" for master, \"%s\" for master candidate,"
-              " \"%s\" for regular, \"%s\" for a drained, \"%s\" for offline" %
+              " \"%s\" for regular, \"%s\" for drained, \"%s\" for offline" %
               role_values)
   fields.append((_MakeField("role", "Role", QFT_TEXT, role_doc), NQ_CONFIG, 0,
                  lambda ctx, node: _GetNodeRole(node, ctx.master_name)))
@@ -1727,26 +1759,6 @@ def _GetInstanceParameterFields():
   @return: List of field definitions used as input for L{_PrepareFieldList}
 
   """
-  # TODO: Consider moving titles closer to constants
-  be_title = {
-    constants.BE_AUTO_BALANCE: "Auto_balance",
-    constants.BE_MAXMEM: "ConfigMaxMem",
-    constants.BE_MINMEM: "ConfigMinMem",
-    constants.BE_VCPUS: "ConfigVCPUs",
-    }
-
-  hv_title = {
-    constants.HV_ACPI: "ACPI",
-    constants.HV_BOOT_ORDER: "Boot_order",
-    constants.HV_CDROM_IMAGE_PATH: "CDROM_image_path",
-    constants.HV_DISK_TYPE: "Disk_type",
-    constants.HV_INITRD_PATH: "Initrd_path",
-    constants.HV_KERNEL_PATH: "Kernel_path",
-    constants.HV_NIC_TYPE: "NIC_type",
-    constants.HV_PAE: "PAE",
-    constants.HV_VNC_BIND_ADDRESS: "VNC_bind_address",
-    }
-
   fields = [
     # Filled parameters
     (_MakeField("hvparams", "HypervisorParameters", QFT_OTHER,
@@ -1779,7 +1791,8 @@ def _GetInstanceParameterFields():
     return lambda ctx, _: ctx.inst_hvparams.get(name, _FS_UNAVAIL)
 
   fields.extend([
-    (_MakeField("hv/%s" % name, hv_title.get(name, "hv/%s" % name),
+    (_MakeField("hv/%s" % name,
+                constants.HVS_PARAMETER_TITLES.get(name, "hv/%s" % name),
                 _VTToQFT[kind], "The \"%s\" hypervisor parameter" % name),
      IQ_CONFIG, 0, _GetInstHvParam(name))
     for name, kind in constants.HVS_PARAMETER_TYPES.items()
@@ -1791,7 +1804,8 @@ def _GetInstanceParameterFields():
     return lambda ctx, _: ctx.inst_beparams.get(name, None)
 
   fields.extend([
-    (_MakeField("be/%s" % name, be_title.get(name, "be/%s" % name),
+    (_MakeField("be/%s" % name,
+                constants.BES_PARAMETER_TITLES.get(name, "be/%s" % name),
                 _VTToQFT[kind], "The \"%s\" backend parameter" % name),
      IQ_CONFIG, 0, _GetInstBeParam(name))
     for name, kind in constants.BES_PARAMETER_TYPES.items()
@@ -2238,14 +2252,16 @@ def _BuildJobFields():
 
   """
   fields = [
-    (_MakeField("id", "ID", QFT_TEXT, "Job ID"),
-     None, 0, lambda _, (job_id, job): job_id),
+    (_MakeField("id", "ID", QFT_NUMBER, "Job ID"),
+     None, QFF_JOB_ID, lambda _, (job_id, job): job_id),
     (_MakeField("status", "Status", QFT_TEXT, "Job status"),
      None, 0, _JobUnavail(lambda job: job.CalcStatus())),
     (_MakeField("priority", "Priority", QFT_NUMBER,
                 ("Current job priority (%s to %s)" %
                  (constants.OP_PRIO_LOWEST, constants.OP_PRIO_HIGHEST))),
      None, 0, _JobUnavail(lambda job: job.CalcPriority())),
+    (_MakeField("archived", "Archived", QFT_BOOL, "Whether job is archived"),
+     JQ_ARCHIVED, 0, lambda _, (job_id, job): job.archived),
     (_MakeField("ops", "OpCodes", QFT_OTHER, "List of all opcodes"),
      None, 0, _PerJobOp(lambda op: op.input.__getstate__())),
     (_MakeField("opresult", "OpCode_result", QFT_OTHER,
@@ -2270,20 +2286,25 @@ def _BuildJobFields():
     (_MakeField("oppriority", "OpCode_prio", QFT_OTHER,
                 "List of opcode priorities"),
      None, 0, _PerJobOp(operator.attrgetter("priority"))),
-    (_MakeField("received_ts", "Received", QFT_OTHER,
-                "Timestamp of when job was received"),
-     None, 0, _JobTimestamp(operator.attrgetter("received_timestamp"))),
-    (_MakeField("start_ts", "Start", QFT_OTHER,
-                "Timestamp of job start"),
-     None, 0, _JobTimestamp(operator.attrgetter("start_timestamp"))),
-    (_MakeField("end_ts", "End", QFT_OTHER,
-                "Timestamp of job end"),
-     None, 0, _JobTimestamp(operator.attrgetter("end_timestamp"))),
     (_MakeField("summary", "Summary", QFT_OTHER,
                 "List of per-opcode summaries"),
      None, 0, _PerJobOp(lambda op: op.input.Summary())),
     ]
 
+  # Timestamp fields
+  for (name, attr, title, desc) in [
+    ("received_ts", "received_timestamp", "Received",
+     "Timestamp of when job was received"),
+    ("start_ts", "start_timestamp", "Start", "Timestamp of job start"),
+    ("end_ts", "end_timestamp", "End", "Timestamp of job end"),
+    ]:
+    getter = operator.attrgetter(attr)
+    fields.extend([
+      (_MakeField(name, title, QFT_OTHER,
+                  "%s (tuple containing seconds and microseconds)" % desc),
+       None, QFF_SPLIT_TIMESTAMP, _JobTimestamp(getter)),
+      ])
+
   return _PrepareFieldList(fields, [])
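
Note: filters taking advantage of the new job fields might look like the following hedged example. The nested-list filter form and the field names "id", "archived" and "received_ts" come from the definitions above; the operator symbols are assumed from qlang.

# Jobs with an ID below 1000; QFF_JOB_ID makes the numeric comparison work
# even if the job ID is handled as a string internally.
filter_by_id = ["<", "id", 1000]

# Non-archived jobs received after a point in time; QFF_SPLIT_TIMESTAMP lets
# the plain float be compared against the stored (seconds, microseconds).
filter_recent = ["&",
                 ["!", ["?", "archived"]],
                 [">=", "received_ts", 1325376000.0]]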