import itertools
import json
import logging
import os
import textwrap

from dateutil import parser as dateparser

from slapos.grid.promise.generic import GenericPromise


def iter_reverse_lines(f):
  """
    Iterate over the lines of a seekable binary file, last line first
  """
  f.seek(0, os.SEEK_END)
  while True:
    try:
      # Step back one byte at a time until just past the previous
      # newline, or until the start of the file (seek returns 0 at
      # offset zero, and raises OSError on a negative offset).
      while f.seek(-2, os.SEEK_CUR) and f.read(1) != b'\n':
        pass
    except OSError:
      return
    pos = f.tell()
    yield f.readline()
    # Move back to the start of the line just yielded, so the next
    # iteration scans backwards from there.
    f.seek(pos, os.SEEK_SET)

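
# Example (hypothetical helper, illustrative only): collect the last three
# lines of a file with iter_reverse_lines. The file must be opened in
# binary mode, since the iterator seeks bytewise and yields bytes.
def _example_last_three_lines(path):
  with open(path, 'rb') as f:
    return list(itertools.islice(iter_reverse_lines(f), 3))
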

def iter_logrotate_file_handle(path, mode='r'):
  """
    Yield successive file handles for rotated logs
    (XX.log, XX.log.1, XX.log.2, ...)
  """
  for i in itertools.count():
    # First the live log, then its numbered rotations, following the
    # dotted convention in the docstring; stop at the first missing file.
    path_i = path + ('.%d' % i if i else '')
    try:
      with open(path_i, mode) as f:
        yield f
    except OSError:
      break

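
# Example (hypothetical helper, illustrative only): count log lines across
# a log file and all of its rotations; iteration stops at the first
# missing rotation.
def _example_count_rotated_lines(path):
  return sum(sum(1 for _ in f)
             for f in iter_logrotate_file_handle(path, 'rb'))
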

class JSONPromise(GenericPromise):
  def __init__(self, config):
    self.__name = config.get('name', None)
    self.__log_folder = config.get('log-folder', None)

    super(JSONPromise, self).__init__(config)
    json_log_name = os.path.splitext(self.__name)[0] + '.json.log'
    self.__json_log_file = os.path.join(self.__log_folder, json_log_name)
    self.json_logger = self.__makeJsonLogger(self.__json_log_file)

  def __makeJsonLogger(self, json_log_file):
    # Name the logger after its log file: a fixed name such as
    # 'json-logger' would make every JSONPromise instance share one
    # logger, duplicating records across all promise log files.
    logger = logging.getLogger(json_log_file)
    logger.setLevel(logging.INFO)
    logger.propagate = False
    # Avoid stacking another handler if the same promise is
    # re-instantiated within one process.
    if not logger.handlers:
      handler = logging.FileHandler(json_log_file)
      # One JSON object per line; %(data)s is interpolated verbatim.
      formatter = logging.Formatter(
        '{"time": "%(asctime)s", "log_level": "%(levelname)s"'
        ', "message": "%(message)s", "data": %(data)s}'
      )
      handler.setFormatter(formatter)
      logger.addHandler(handler)
    return logger
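
  # Records written through json_logger must carry their payload as
  # pre-serialized JSON under the 'data' key, e.g. (hypothetical call):
  #   self.json_logger.info("msg", extra={'data': json.dumps(payload)})
  # Without 'extra', the %(data)s interpolation fails and the logging
  # module drops the record.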

  def getJsonLogDataInterval(self, interval):
    """
      Get all data in the last "interval" seconds from JSON log
      Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
    """
    latest_timestamp = None
    data_list = []
    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
      for line in iter_reverse_lines(f):
        # Repair single-quoted payloads so they parse as JSON.
        entry = json.loads(line.decode().replace("'", '"'))
        timestamp = dateparser.parse(entry['time'])
        data_list.append(entry['data'])
        # Lines come newest-first, so the first timestamp seen is the
        # latest one; stop once entries fall outside the interval.
        latest_timestamp = latest_timestamp or timestamp
        if (latest_timestamp - timestamp).total_seconds() > interval:
          return data_list
    return data_list

  def getJsonLogLatestTimestamp(self):
    """
      Get latest timestamp from JSON log
      Reads rotated logs too (XX.log, XX.log.1, XX.log.2, ...)
    """
    for f in iter_logrotate_file_handle(self.__json_log_file, 'rb'):
      for line in iter_reverse_lines(f):
        entry = json.loads(line.decode().replace("'", '"'))
        return dateparser.parse(entry['time'])
    # No log entry found at all.
    return 0
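

# Example (hypothetical subclass, illustrative only): how a promise plugin
# would typically build on JSONPromise. Plugins normally name their class
# RunPromise in their own module; the payload and interval are made up.
class _ExampleRunPromise(JSONPromise):
  def sense(self):
    # Log one structured record; the payload must be pre-serialized JSON.
    self.json_logger.info(
      "Sensor check",
      extra={'data': json.dumps({'temperature': 42})})
    # Inspect everything logged in the last 10 minutes.
    data_list = self.getJsonLogDataInterval(600)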


def tail_file(file_path, line_count=10):
  """
    Return the last `line_count` lines of the file at `file_path`
  """
  block_list = []
  # Binary mode is required: in Python 3, text-mode files do not support
  # the nonzero end-relative seeks used below.
  with open(file_path, 'rb') as f:
    BUFSIZ = 1024
    f.seek(0, os.SEEK_END)
    remaining = f.tell()
    missing = line_count + 1
    block = -1
    # Read blocks from the end of the file until enough newlines have
    # been collected or the whole file has been read.
    while missing > 0 and remaining > 0:
      if remaining > BUFSIZ:
        # Seek back one more whole block from the end and read it.
        f.seek(block * BUFSIZ, os.SEEK_END)
        block_list.insert(0, f.read(BUFSIZ))
      else:
        # Fewer than BUFSIZ bytes remain unread: read the file head.
        f.seek(0, os.SEEK_SET)
        block_list.insert(0, f.read(remaining))
      missing -= block_list[0].count(b'\n')
      remaining -= BUFSIZ
      block -= 1
  text = b''.join(block_list).decode('utf-8', errors='replace')
  return '\n'.join(text.splitlines()[-line_count:])
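

# Example (hypothetical path, illustrative only): print the last 20 lines
# of some service log with tail_file.
def _example_print_log_tail():
  print(tail_file('/var/log/example.log', line_count=20))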