I am currently testing DEAP for future structural optimization tasks. Using the setup from the beginner's tutorial I'm testing parallel evaluation using to optimize the rosenbrock function. To assess the benefits of parallelization I additionally do some useless but intense calculations in the objective function. Weird thing is, SCOOP and multiprocessing both seem to work (they spawn multiple python processes), but both are slower than not using parallelization. OS is Win7 x64.
import random
import numpy as np
import multiprocessing
from deap import base, creator, tools
from scoop import futures
from timeit import default_timer as timer
if __name__ == '__main__': # Protecting multiprocessing pool as indicated in the tutorial
# Create Fitness and Individual Classes
creator.create('FitnessMin', base.Fitness, weights=(-1.0,)) # Weights must be a sequence
creator.create('Individual', list, fitness=creator.FitnessMin)
# Create Individual Type
IND_SIZE = 2
toolbox = base.Toolbox()
def rand():
return random.uniform(-3,3)
toolbox.register('attr_float', rand)
toolbox.register('individual', tools.initRepeat, creator.Individual,
toolbox.attr_float, n=IND_SIZE)
# Create Population Type
toolbox.register('population', tools.initRepeat, list, toolbox.individual)
# Define Evaluation Function
def evaluate(individual):
"""Fitness evaluation. Must return a tuple even for single objective optimization.
"""
x = (individual[0])
y = (individual[1])
# Do some heavy stuff which is totally useless
n = 1873895732
import math
for i in range(10000000):
math.sqrt(n)
i = i + 1
print 'Done!'
f = (1-x)**2 + 100*(y-x**2)**2 # Actual rosenbrock fct
return (f,) # Must be an iterable
# Define Operators
toolbox.register('mate', tools.cxTwoPoint)
toolbox.register('mutate', tools.mutGaussian, mu = 0, sigma = 1, indpb = 0.1)
toolbox.register('select', tools.selTournament, tournsize=3)
toolbox.register('evaluate', evaluate)
#toolbox.register('map', futures.map) # or this line instead of the following two and using python -m scoop file.py on cmd
pool = multiprocessing.Pool()
toolbox.register('map', pool.map)
# Optimization Algorithm
def main():
"""Defines the optimization process. Should be contained in the main function.
"""
# General Settings
POP_SIZE = 20 # Population size
NGEN = 1 # Number of generations
CXPB = 0.2 # Crossover probability
MUTPB = 0.5 # Mutation probablilty
# Initialize population
pop = toolbox.population(n=POP_SIZE)
# Evaluate the entire initial population
fitnesses = map(toolbox.evaluate, pop)
# Assign the fitnesses
for ind, fit in zip(pop,fitnesses):
ind.fitness.values = fit
# Evolutionary Loop
for g in range(NGEN):
# Select the next generations individuals (len(pop) individuals)
offspring = toolbox.select(pop, len(pop))
# Clone the selected individuals
offspring = map(toolbox.clone, offspring)
# Apply crossover on the offspring
for child1, child2 in zip(offspring[::2], offspring[1::2]): # Chooses offspring with neighbouring indices to mutate
if random.random() < CXPB:
# Do crossover inplace
toolbox.mate(child1, child2)
# Request reevaluation of fitnesses
del child1.fitness.values
del child2.fitness.values
# Apply mutation on the offspring
for mutant in offspring:
if random.random() < MUTPB:
# Do mutation inplace
toolbox.mutate(mutant)
# Request reevaluation of fitness
del mutant.fitness.values
# Evaluate fitnesses of individuales with invalid fitnesses
invalid_ind = [ind for ind in offspring if not ind.fitness.valid]
fitnesses = map(toolbox.evaluate, invalid_ind)
for ind, fit in zip(invalid_ind, fitnesses):
ind.fitness.values = fit
# Replace the population by the offspring
pop[:] = offspring
return pop
pop = main()
print timer()-tic