Hippos Technical Systems BV - Larry Myerscough


These pages contain information about my family, my company, my hobbies and myself.


python source of 'phileas_project/show_python_source.py'

#!/usr/bin/python
# -*- encoding: utf8 -*-
from __future__ import print_function
from phileas import _html40 as h
from htdocs.orderedPage import OrderedPage, main
import os, tokenize, keyword
try:
    from io import StringIO          # Python 3
except ImportError:
    from cStringIO import StringIO   # Python 2

class SourcePage(OrderedPage):

    # NUMBER, STRING and COMMENT are tokenize token names; DEF, EXPORTER
    # and KEYWORD are pseudo-tokens recognised in lowerText() below.
    colors = {'NUMBER'  : 'blue',
              'STRING'  : 'brown',
              'COMMENT' : 'red',
              'DEF'     : 'green',
              'EXPORTER': 'darkblue',
              'KEYWORD' : 'purple'}

    bgcolor = 'white'

    def title(self, script_filename=()):
        # the query parameter arrives as a one-element sequence; show just the file name
        return os.path.split(script_filename[0])[1]
        
    def lowerBanner(self, script_filename=()):
        return "python source of '%s'" % script_filename[0]
    
    def lowerText(self, script_filename=()):
        # build a <pre> element containing the colourised source of the requested file
        src = open(script_filename[0]).read()

        # store line offsets (first line is 1)
        lines = [None, 0]
        pos = 0
        while True:
            pos = src.find('\n', pos) + 1
            if not pos: break
            lines.append(pos)

        pos = 0
        prev_tok_is_from = prev_tok_is_def = False

        pre = h.pre(style="background-color:%s;padding:10px;border-style:solid" % self.bgcolor)
        for (tok_type, tok_str, (srow, scol), (erow, ecol), line) \
                in tokenize.generate_tokens(StringIO(src).readline):

            # calculate new position
            newpos = lines[srow] + scol

            # classify NAME tokens: a keyword, a name being defined after
            # 'def'/'class', or a module name imported after 'from'
            iskeyword = tok_type == tokenize.NAME and keyword.iskeyword(tok_str)
            isdefname = tok_type == tokenize.NAME and prev_tok_is_def
            isexporter = tok_type == tokenize.NAME and prev_tok_is_from

            # write text that's been skipped by tokenizer
            if newpos > pos:
                pre |= (src[pos:newpos])
                pos = newpos

            # get the correct color
            if iskeyword:
                color = self.colors.get('KEYWORD')
            elif isdefname:
                color = self.colors.get('DEF')
            elif isexporter:
                color = self.colors.get('EXPORTER')
            else:
                color = self.colors.get(tokenize.tok_name[tok_type])

            # advance past this token and note whether it introduces a
            # definition ('def'/'class') or an import ('from')
            pos += len(tok_str)
            prev_tok_is_def = iskeyword and tok_str in ['def', 'class']
            prev_tok_is_from = iskeyword and tok_str in ['from']
            # for a module named after 'from', link to its source if it is on disk
            if isexporter:
                try:
                    _imp = __import__(tok_str)
                    py_src = _imp.__file__
                    if py_src.endswith('.pyc'):   # compiled file: point at the matching .py
                        py_src = py_src[:-1]
                    if os.path.exists(py_src):
                        tok_str = h.a(
                            href="/show_python_source?script_filename=%s" % py_src
                        ) | tok_str
                except Exception:
                    pass   # builtins and C extensions have no .py source to link to
            # wrap the token in a coloured span and append it to the <pre>
            style = color and "color:%s" % color
            pre |= h.span(style=style) | tok_str
        return pre

if __name__ == "__main__":
    main(SourcePage)
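
The colouring above is driven entirely by the standard-library tokenize and keyword modules; phileas only supplies the h.pre / h.span / h.a objects that build the markup. For readers without phileas installed, here is a rough, self-contained sketch of the same idea (Python 3 only, plain HTML strings instead of phileas elements; the colorize() helper and its colour table are mine, for illustration, and not part of this site):

#!/usr/bin/python3
# Standalone sketch of the same tokenize-based colouring, without phileas.
# Python 3 only; emits plain HTML strings. Names and colours are illustrative.
import html
import io
import keyword
import tokenize

COLORS = {tokenize.NUMBER: 'blue',
          tokenize.STRING: 'brown',
          tokenize.COMMENT: 'red'}

def colorize(src):
    """Wrap Python source in an HTML <pre>, colouring keywords and literals."""
    # offset of the first character of each line (rows are 1-based, as in tokenize)
    offsets = [None, 0]
    pos = 0
    while True:
        pos = src.find('\n', pos) + 1
        if not pos:
            break
        offsets.append(pos)

    out = ['<pre style="background-color:white;padding:10px">']
    pos = 0
    for tok in tokenize.generate_tokens(io.StringIO(src).readline):
        if not tok.string:               # DEDENT, ENDMARKER etc. carry no text
            continue
        srow, scol = tok.start
        newpos = offsets[srow] + scol
        if newpos > pos:                 # emit text the tokenizer skipped (indentation etc.)
            out.append(html.escape(src[pos:newpos]))
        pos = newpos + len(tok.string)
        # keywords get their own colour; other token types use the table above
        if tok.type == tokenize.NAME and keyword.iskeyword(tok.string):
            color = 'purple'
        else:
            color = COLORS.get(tok.type)
        text = html.escape(tok.string)
        out.append('<span style="color:%s">%s</span>' % (color, text) if color else text)
    out.append('</pre>')
    return ''.join(out)

if __name__ == "__main__":
    # colourise this file's own source, much as show_python_source.py does
    print(colorize(open(__file__).read()))

Saved as, say, colorize_sketch.py, running it prints an HTML page of its own source, in the same spirit as the show_python_source page above.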