Optimizing / avoiding overridden __getattribute__ / __getattr__

My current code overrides __getattribute__ / __getattr__ on some heavily used classes, and profiling shows that a considerable amount of time is spent in the overridden methods. To measure the cost of a bare override in isolation, I reduced the code to the following benchmark:

from timeit import default_timer

class Test(object):
    def __init__(self):
        self.a = 1.0

class Test1(object):
    def __init__(self):
        self.a = 1.0

    def __getattribute__(self, item):
        return object.__getattribute__(self, item)

class Test2(object):
    def __init__(self):
        self.a = 1.0

    def __getattribute__(self, item):
        return super(Test2, self).__getattribute__(item)

class ObjectWrapper(object):
    def __init__(self, ocls, obj=None):
        self.__ocls = ocls
        self.__obj = None
        self.set_inner_object(obj)

    def set_inner_object(self, obj):
        if obj is None or isinstance(obj, self.__ocls):
            self.__obj = obj
        else:
            raise RuntimeError("The value is %s, but it must be None or an "
                               "instance of %s" % (type(obj), self.__ocls.__name__))

    def __getattr__(self, name):
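        # Called only when normal attribute lookup fails; delegate to the inner object.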
        return getattr(self.__obj, name)

def main():
    x = Test()
    x1 = Test1()
    x2 = Test2()
    xw = ObjectWrapper(Test, x)

    accsum = 0.0
    accsum1 = 0.0
    accsum2 = 0.0
    accsumw = 0.0

    s = default_timer()
    for i in xrange(0, 100000000):
        accsum += x.a
    e = default_timer()

    s1 = default_timer()
    for i in xrange(0, 100000000):
        accsum1 += x1.a
    e1 = default_timer()

    s2 = default_timer()
    for i in xrange(0, 100000000):
        accsum2 += x2.a
    e2 = default_timer()

    sw = default_timer()
    for i in xrange(0, 100000000):
        accsumw += xw.a
    ew = default_timer()

    print "speed factor Test1/Test: ", (e1 - s1) / (e - s)
    print "speed factor Test2/Test: ", (e2 - s2) / (e - s)
    print "speed factor Test wrapped/Test: ", (ew - sw) / (e - s)

if __name__ == '__main__':
    main()

The effect of the override is huge. Here are the results:

speed factor Test1/Test:  6.32820892871
speed factor Test2/Test:  8.4176175507
speed factor Test wrapped/Test:  11.6202852701

Is this normal for Python (Python is not my native programming language), and if so, why? Is this the price of dynamic re-evaluation, and/or is it simply a penalty I have to accept?

How can I improve the performance?

1 answer

Yes, this is normal, and it is the price of Python being dynamic. The default __getattribute__ is implemented in C; as soon as you override it, every attribute access has to execute a Python-level function instead, with all the call overhead that implies.

Look at the bytecode your override compiles to:

>>> dis.dis(Test2.__getattribute__)
  5           0 LOAD_GLOBAL              0 (super)
              3 LOAD_GLOBAL              1 (Test2)
              6 LOAD_FAST                0 (self)
              9 CALL_FUNCTION            2 (2 positional, 0 keyword pair)
             12 LOAD_ATTR                2 (__getattribute__)
             15 LOAD_FAST                1 (item)
             18 CALL_FUNCTION            1 (1 positional, 0 keyword pair)
             21 RETURN_VALUE

Each LOAD_GLOBAL is a dictionary lookup in the module namespace, the first CALL_FUNCTION builds a fresh super object, and LOAD_ATTR is yet another lookup on it, followed by a second call. All of that, plus the frame pushed for the method itself, runs on every single attribute access, while the default C implementation does none of it. The wrapper is the worst case of all: every access to xw.a first fails normal lookup, then executes the Python-level __getattr__, which in turn performs a full getattr on the inner object. If you cannot simply delete the overrides, a few standard workarounds follow below.
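The cheapest one, when a hot loop keeps reading the same attribute, is to pay the slow lookup once and keep the result in a local variable. This is only valid because the value does not change inside the loop; a minimal sketch using the names from the question:

a = xw.a                          # one slow __getattr__ call
accsumw = 0.0
for i in xrange(0, 100000000):
    accsumw += a                  # plain local-variable access from here on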

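Second, remember that __getattr__, unlike __getattribute__, is invoked only when normal lookup fails, so a class that merely needs fallback behaviour pays nothing extra for attributes that actually exist. A quick check (the class names here are invented for the demonstration):

from timeit import timeit

class Plain(object):
    def __init__(self):
        self.a = 1.0

class Fallback(object):
    def __init__(self):
        self.a = 1.0

    def __getattr__(self, name):      # runs only on *failed* lookups
        raise AttributeError(name)

p = Plain()
f = Fallback()

# Both timings should come out about the same: 'a' is found in the
# instance dict, so Fallback.__getattr__ is never called here.
print timeit('p.a', 'from __main__ import p')
print timeit('f.a', 'from __main__ import f')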
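Third, for a delegating wrapper like ObjectWrapper, where every access is by definition a failed lookup, a common pattern is to memoize delegated attributes in the wrapper's own instance dictionary, so that only the first access to each attribute takes the slow path. This sketch assumes the inner object's attributes do not change behind the wrapper's back; the cache is dropped whenever the inner object is replaced:

class CachingWrapper(object):
    def __init__(self, ocls, obj=None):
        self.__ocls = ocls
        self.__obj = None
        self.set_inner_object(obj)

    def set_inner_object(self, obj):
        if not (obj is None or isinstance(obj, self.__ocls)):
            raise RuntimeError("The value is %s, but it must be None or an "
                               "instance of %s" % (type(obj), self.__ocls.__name__))
        # Invalidate attributes cached from the previous inner object.
        for name in list(self.__dict__):
            if not name.startswith('_CachingWrapper__'):
                del self.__dict__[name]
        self.__obj = obj

    def __getattr__(self, name):
        # Reached only when normal lookup fails, i.e. on the first access.
        value = getattr(self.__obj, name)
        self.__dict__[name] = value    # later accesses bypass __getattr__
        return value

With this, xw.a is slow exactly once and then as fast as an ordinary attribute.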
