"""
    A source of IRC log files
"""

import datetime, calendar, itertools
import os, errno
import pytz

class LogSource (object) :
    """
        A collection of IRC logs for a specific target in some format. Provides methods to read specific ranges of events.
    """
    
    def get_latest (self, count) :
        """
            Yield the latest events, up to `count` of them.
        """

        raise NotImplementedError()
    
    def get_date (self, dt) :
        """
            Get logs for the given date (as a datetime)
        """

        raise NotImplementedError()
    
    def get_month_days (self, dt) :
        """
            Get a set of dates indicating which days in the given month (as a datetime) have logs available
        """

        raise NotImplementedError()
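
    # Interface sketch (hypothetical caller code; `source` stands for any concrete
    # LogSource, such as LogDirectory below, and the variable names are illustrative):
    #
    #   events = source.get_latest(100)                  # the newest events
    #   events = source.get_date(aware_datetime)         # one day's worth of events
    #   days   = source.get_month_days(aware_datetime)   # set of dates that have logs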
    
class LogFile (object) :
    """
        A file containing LogEvents

        XXX: modify to implement LogSource?
    """

    def __init__ (self, path, parser, charset, start_date=None, sep='\n') :
        """
            Open the file at the given path, which contains data in the given charset, as lines separated by the
            given separator. Lines are parsed using the given parser, with start_date as the initial date; see
            LogParser for more info. XXX: currently we assume start_date also for the end of the file
        """
        
        # store
        self.path = path
        self.parser = parser
        self.start_date = start_date
        self.charset = charset
        self.sep = sep

        # open
        self.file = open(path, 'rb')
    
    def __iter__ (self) :
        """
            Yields a series of unicode lines, as read from the top of the file
        """
        
        # seek to beginning
        self.file.seek(0)

        # iterate over lines, decoding them as well
        return (line.decode(self.charset).rstrip(self.sep) for line in self.file)
    
    def read_full (self) :
        """
            Reads all LogLines. The LogLines will have a valid offset
        """
        
        # just use our __iter__
        return self.parser.parse_lines(self, self.start_date, starting_offset=1)

    def read_from (self, dt) :
        """
            Yields all LogLines with a timestamp at or after the given naive timestamp
        """
        
        # start reading at beginning
        events = self.read_full()
        
        # skip unwanted events
        for event in events :
            if event.timestamp < dt :
                continue

            else :
                # include this line as well
                yield event
                break
        
        # yield the rest as-is
        for event in events :
            yield event

    def read_until (self, dt) :
        """
            Yields all LogLines with a timestamp up to and including the given naive timestamp
        """

        # start reading events at the beginning
        events = self.read_full()

        # yield events until we hit the given timestamp
        for event in events :
            if event.timestamp <= dt :
                yield event

            else :
                break
            
        # ignore the rest
        return

    def _read_blocks_reverse (self, blocksize=1024) :
        """
            Yields blocks of file data in reverse order, starting at the end of the file
        """

        # seek to end of file
        self.file.seek(0, os.SEEK_END)

        # read offset
        # XXX: hack -1 to get rid of the trailing newline
        offset = self.file.tell() - 1
        
        # do not try to read past the beginning of the file
        while offset > 0 :
            # calc new offset + size
            if offset > blocksize :
                # full block
                offset -= blocksize
                read_size = blocksize

            else :
                # partial block
                read_size = offset
                offset = 0

            # seek to offset
            self.file.seek(offset)

            # read the data we want
            block = self.file.read(read_size)

            # sanity check
            assert len(block) == read_size

            # yield 
            yield block
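
        # Worked example (illustrative numbers only): for a 2500-byte file and the
        # default blocksize of 1024, offset starts at 2499 (the -1 hack above), and the
        # yielded blocks cover offsets [1475:2499), then [451:1475), then [0:451), so
        # the file is walked back-to-front in chunks of at most blocksize bytes.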
    
    def _read_lines_reverse (self) :
        """
            Yields decoded lines from the end of the file, in reverse order.
        """

        # partial lines
        buf = ''
        
        # read from end of file, a block at a time
        for block in self._read_blocks_reverse() :
            # add in our previous buf
            buf = block + buf
            
            # split up lines
            lines = buf.split(self.sep)

            # keep the first one as our buffer, as it's incomplete
            buf = lines[0]
           
            # yield the rest a line at a time, in reverse order (the [:0:-1] slice skips
            # element 0 and reverses the remainder)
            # XXX: use something like islice, this has to build a slice object
            for line in lines[:0:-1] :
                yield line.decode(self.charset)

        # whatever remains in the buffer is the first line of the file, yield it last
        if buf :
            yield buf.decode(self.charset)
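
        # Worked example (illustrative, with a tiny blocksize of 8): for file data
        # 'alpha\nbeta\ngamma\n', the blocks arrive as 'ta\ngamma' and then 'alpha\nbe';
        # the first yields 'gamma' and leaves buf = 'ta', the second completes that to
        # 'alpha\nbeta', yields 'beta', and leaves buf = 'alpha', which is yielded last.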

    def read_latest (self, count) :
        """
            Returns up to `count` events from the end of the file, or fewer if the file doesn't contain that many lines.
        """

        # the list of lines
        lines = []

        # start reading lines into lines
        for line in self._read_lines_reverse() :
            # append
            lines.append(line)

            # done?
            if len(lines) >= count :
                break
        
        # parse the collected lines back in chronological order, using our starting date
        # XXX: use lines[::-1] or reversed?
        # XXX: it may make more sense to parse in reverse order, using 'self.end_date' or something like that
        return self.parser.parse_lines(reversed(lines), self.start_date)
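
    # Example usage (a sketch; `IrcLogParser` is a hypothetical LogParser
    # implementation, and the path and date are illustrative):
    #
    #   logfile = LogFile('/var/log/irc/2009-02-09.log', IrcLogParser(), 'utf-8',
    #                     start_date=datetime.datetime(2009, 2, 9))
    #   for event in logfile.read_latest(20) :
    #       print event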

class LogDirectory (LogSource) :
    """
        A directory containing a series of timestamped LogFiles
    """

    def __init__ (self, path, tz, parser, charset, filename_fmt) :
        """
            Load the logfiles at the given path.
            
            The files contain data in the given charset, and are named according to the date in the given timezone and
            date format, and will be parsed using the given parser.
        """

        # store
        self.path = path
        self.tz = tz
        self.parser = parser
        self.charset = charset
        self.filename_fmt = filename_fmt

    def _get_logfile_datetime (self, dt) :
        """
            Get the logfile corresponding to the given datetime
        """

        # convert to target timezone
        dtz = dt.astimezone(self.tz)
        
        # convert to date and use that
        return self._get_logfile_date(dtz.date())

    def _get_logfile_date (self, d, load=True) :
        """
            Get the logfile corresponding to the given naive date in our timezone. If load is False, only test for the
            presence of the logfile, do not actually open it.

            Returns None if the logfile does not exist.
        """

        # format filename
        filename = d.strftime(self.filename_fmt)

        # build path
        path = os.path.join(self.path, filename)
        
        try :
            if load :
                # open+return the LogFile
                return LogFile(path, self.parser, self.charset, d)
            
            else :
                # test
                return os.path.exists(path)

        # XXX: move to LogFile
        except IOError, e :
            # return None for missing files
            if e.errno == errno.ENOENT :
                return None

            else :
                raise
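
    # For example (illustrative values only): with filename_fmt = '%Y-%m-%d.log' and
    # path = '/var/log/irc', the naive date 2009-02-09 is looked up as the file
    # '/var/log/irc/2009-02-09.log'.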
    
    def _iter_date_reverse (self, dt=None) :
        """
            Yields an infinite series of naive date objects in our timezone, iterating backwards in time starting at the
            given *datetime*, or the current date, if none given
        """
        
        # default to now
        if not dt :
            dt = datetime.datetime.now(pytz.utc)
        
        # convert to target timezone
        dtz = dt.astimezone(self.tz)

        # our timedelta
        ONE_DAY = datetime.timedelta(1)
        
        # iterate unto infinity
        while True :
            # yield
            yield dtz.date()
            
            # step backwards one day
            dtz -= ONE_DAY
    
    def _iter_logfile_reverse (self, dt=None, max_files=100) :
        """
            Yields a series of LogFile objects, iterating backwards in time starting at the given datetime, or the
            current date, if none given.

            Reads/probes at most max_files files.
        """
        
        # start counting at zero...
        file_count = 0

        # iterate backwards over days
        for day in self._iter_date_reverse(dt) :
            # stop if we've handled enough files by now
            if file_count > max_files :
                break
            
            # try and open the next logfile
            file_count += 1
            logfile = self._get_logfile_date(day)
            
            # no logfile there?
            if not logfile :
                # if we've used up our probe budget, terminate rudely
                if file_count > max_files :
                    raise Exception("No recent logfiles found")
                
                else :
                    # skip to next day
                    continue
            
            # yield it
            yield logfile

    def get_latest (self, count) :
        """
            Uses _iter_logfile_reverse to read and yield the given number of lines from as many logfiles as needed
        """

        # iterate backwards over logfiles
        logfiles = self._iter_logfile_reverse()
        
        # read the events into here
        lines = []
        
        # loop until done
        while len(lines) < count :
            # next logfile
            logfile = logfiles.next()

            # read the events
            # XXX: use a queue
            lines = list(logfile.read_latest(count)) + lines
        
        # return no more than count of the latest events
        return lines[-count:]

    def get_date (self, dt) :
        """
            A 'day' is considered to be a 24-hour period from 00:00:00 to 23:59:59. If the timezone of the given datetime
            differs from our native timezone, this may involve lines from more than one logfile.
        """

        # begin/end of 24h period, in target timezone
        dtz_begin = dt.replace(hour=0, minute=0, second=0, microsecond=0).astimezone(self.tz)
        dtz_end = dt.replace(hour=23, minute=59, second=59, microsecond=999999).astimezone(self.tz)

        # as dates
        d_begin = dtz_begin.date() 
        d_end = dtz_end.date()
        

        # if they're the same, just pull the full log for that date
        if d_begin == d_end :
            # open that log
            logfile = self._get_logfile_date(d_begin)
            
            if not logfile :
                raise Exception("No logfile for date=%r" % (dt, ))
            
            # return the full data
            return logfile.read_full()
        
        # otherwise, we need to pull two partial logs
        else :
            # open both of them
            f_begin = self._get_logfile_date(d_begin)
            f_end = self._get_logfile_date(d_end)

            # chain together the two sources, skipping either logfile if it is missing
            return itertools.chain(
                f_begin.read_from(dtz_begin) if f_begin else [],
                f_end.read_until(dtz_end) if f_end else []
            )
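
    # Worked example (illustrative): if logfiles are named by UTC date but the request
    # is for 2009-02-09 in Europe/Helsinki (UTC+2), the local day spans 2009-02-08
    # 22:00:00 UTC through 2009-02-09 21:59:59 UTC, so d_begin is 2009-02-08 and d_end
    # is 2009-02-09, and the day's events are chained out of two logfiles.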

    def get_month_days (self, month) :
        """
            Returns a set of dates for which logfiles are available in the given datetime's month
        """
        
        # the set of days
        days = set()
        
        # iterate over month's days using Calendar
        for date in calendar.Calendar().itermonthdates(month.year, month.month) :
            # convert date to target datetime
            dtz = month.tzinfo.localize(datetime.datetime.combine(date, datetime.time(0))).astimezone(self.tz)

            # date in our target timezone
            log_date = dtz.date()
            
            # test for it
            if self._get_logfile_date(log_date, load=False) :
                # add to set
                days.add(date)

        # return set
        return days
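
# Example usage (a sketch; `IrcLogParser` is a hypothetical LogParser implementation,
# and the path, timezone, charset and filename format are illustrative):
#
#   source = LogDirectory('/var/log/irc/#channel', pytz.timezone('Europe/Helsinki'),
#                         IrcLogParser(), 'utf-8', '%Y-%m-%d.log')
#
#   latest = source.get_latest(50)                                  # newest events
#   today  = source.get_date(datetime.datetime.now(pytz.utc))       # one day's events
#   days   = source.get_month_days(datetime.datetime.now(pytz.utc)) # dates with logs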