This file is indexed.

/usr/lib/python2.7/dist-packages/Cheetah/Tests/Performance.py is in python-cheetah 2.4.4-4.

This file is owned by root:root, with mode 0o644.

The actual contents of the file can be viewed below.

#!/usr/bin/env python

import hotshot
import hotshot.stats
import os
import sys
import unittest

from test import pystone
import time

import Cheetah.NameMapper 
import Cheetah.Template

# This can be turned on with the `--debug` flag when running the test
# and will cause the tests to all just dump out how long they took
# instead of asserting on duration
DEBUG = False

# TOLERANCE in Pystones
kPS = 1000
TOLERANCE = 0.5*kPS 

class DurationError(AssertionError):
    pass

_pystone_calibration_mark = None
def _pystone_calibration():
    global _pystone_calibration_mark
    if not _pystone_calibration_mark:
        _pystone_calibration_mark = pystone.pystones(loops=pystone.LOOPS)
    return _pystone_calibration_mark

def perftest(max_num_pystones, current_pystone=None):
    '''
        Performance test decorator based on the 'timedtest'
        decorator found in this ActiveState recipe:
            http://code.activestate.com/recipes/440700/
    '''
    if not isinstance(max_num_pystones, float):
        max_num_pystones = float(max_num_pystones)

    if not current_pystone:
        current_pystone = _pystone_calibration()

    def _test(function):
        def wrapper(*args, **kw):
            start_time = time.time()
            try:
                return function(*args, **kw)
            finally:
                total_time = time.time() - start_time
                if total_time == 0:
                    pystone_total_time = 0
                else:
                    pystone_rate = current_pystone[0] / current_pystone[1]
                    pystone_total_time = total_time / pystone_rate
                global DEBUG
                if DEBUG:
                    print('The test "%s" took: %s pystones' % (function.func_name,
                        pystone_total_time))
                else:
                    if pystone_total_time > (max_num_pystones + TOLERANCE):
                        raise DurationError((('Test too long (%.2f Ps, '
                                        'need at most %.2f Ps)')
                                        % (pystone_total_time,
                                            max_num_pystones)))
        return wrapper
    return _test


class DynamicTemplatePerformanceTest(unittest.TestCase):
    loops = 10
    #@perftest(1200)
    def test_BasicDynamic(self):
        template = '''
            #def foo(arg1, arg2)
                #pass
            #end def
        '''
        for i in range(self.loops):
            klass = Cheetah.Template.Template.compile(template)
            assert klass
    test_BasicDynamic = perftest(1200)(test_BasicDynamic)
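    # Note: wrapping the method explicitly, as above, is equivalent to the
    # commented-out @perftest(1200) decorator syntax shown before the method.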

class PerformanceTest(unittest.TestCase):
    iterations = 100000
    display = False
    save = False

    def runTest(self):
        self.prof = hotshot.Profile('%s.prof' % self.__class__.__name__)
        self.prof.start()
        for i in range(self.iterations):
            if hasattr(self, 'performanceSample'):
                self.display = True
                self.performanceSample()
        self.prof.stop()
        self.prof.close()
        if self.display:
            print('>>> %s (%d iterations) ' % (self.__class__.__name__,
                    self.iterations))
            stats = hotshot.stats.load('%s.prof' % self.__class__.__name__)
            #stats.strip_dirs()
            stats.sort_stats('time', 'calls')
            stats.print_stats(50)

        if not self.save:
            os.unlink('%s.prof' % self.__class__.__name__)

class DynamicMethodCompilationTest(PerformanceTest):
    def performanceSample(self):
        template = '''
            #import sys
            #import os
            #def testMethod()
                #set foo = [1, 2, 3, 4]
                #return $foo[0]
            #end def
        '''
        template = Cheetah.Template.Template.compile(template, 
            keepRefToGeneratedCode=False)
        template = template()
        value = template.testMethod()


class BunchOfWriteCalls(PerformanceTest):
    iterations = 1000
    def performanceSample(self):
        template = '''
            #import sys
            #import os
            #for i in range(1000)
                $i
            #end for
        '''
        template = Cheetah.Template.Template.compile(template, 
            keepRefToGeneratedCode=False)
        template = template()
        value = template.respond()
        del value

class DynamicSimpleCompilationTest(PerformanceTest):
    def performanceSample(self):
        template = '''
            #import sys
            #import os
            #set foo = [1,2,3,4]

            Well hello there! This is basic.

            Here's an array too: $foo
        '''
        template = Cheetah.Template.Template.compile(template, 
            keepRefToGeneratedCode=False)
        template = template()
        template = unicode(template)


class FilterTest(PerformanceTest):
    template = None
    def setUp(self):
        super(FilterTest, self).setUp()
        template = '''
            #import sys
            #import os
            #set foo = [1, 2, 3, 4]

            $foo, $foo, $foo
        '''
        template = Cheetah.Template.Template.compile(template, 
            keepRefToGeneratedCode=False)
        self.template = template()

    def performanceSample(self):
        value = unicode(self.template)


class LongCompileTest(PerformanceTest):
    ''' Test the compilation on a sufficiently large template '''
    def compile(self, template):
        return Cheetah.Template.Template.compile(template, keepRefToGeneratedCode=False)

    def performanceSample(self):
        template = '''
            #import sys
            #import Cheetah.Template

            #extends Cheetah.Template.Template

            #def header()
                <center><h2>This is my header</h2></center>
            #end def
            
            #def footer()
                #return "Huzzah"
            #end def

            #def scripts()
                #pass
            #end def

            #def respond()
                <html>
                    <head>
                        <title>${title}</title>
                        
                        $scripts()
                    </head>
                    <body>
                        $header()

                        #for $i in $range(10)
                            This is just some stupid page!
                            <br/>
                        #end for

                        <br/>
                        $footer()
                    </body>
                    </html>
            #end def
            
        '''
        return self.compile(template)

class LongCompile_CompilerSettingsTest(LongCompileTest):
    def compile(self, template):
        return Cheetah.Template.Template.compile(template, keepRefToGeneratedCode=False,
            compilerSettings={'useStackFrames' : True, 'useAutocalling' : True})

class LongCompileAndRun(LongCompileTest):
    def performanceSample(self):
        template = super(LongCompileAndRun, self).performanceSample()
        template = template(searchList=[{'title' : 'foo'}])
        template = template.respond()
            

if __name__ == '__main__':
    if '--debug' in sys.argv:
        DEBUG = True
        sys.argv = [arg for arg in sys.argv if not arg == '--debug']
    unittest.main()
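
# Example invocation (assumes a Python 2.7 interpreter with Cheetah installed,
# plus the stdlib `hotshot` profiler and `test.pystone` benchmark available):
#
#   python Performance.py           # run the suite; perftest-wrapped tests
#                                   # raise DurationError when over budget
#   python Performance.py --debug   # print pystone timings instead of asserting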