# Row-selection model: assign rows to two resource sets with a PuLP 0/1 program.
import pandas as pd
import numpy as np
import random
from pulp import LpInteger, LpVariable, LpProblem, lpSum, LpMinimize, LpStatus
# Synthetic test data: s rows, each with a small integer "index" score
# (0-9) and two random distance columns drawn from [0, 1000).
s = 20
m1 = list(np.random.choice(10, size=s, replace=True))
dist1 = list(np.random.choice(1000, size=s, replace=True))
dist2 = list(np.random.choice(1000, size=s, replace=True))
ms = pd.DataFrame({'index': m1, 'dist1': dist1, 'dist2': dist2})
def objective_function(row, x1, x2, data=None):
    """Score one row's contribution to the model objective.

    Parameters
    ----------
    row : int
        Positional row index into the data frame.
    x1, x2 : number or pulp.LpVariable
        Selection indicators for resource set 1 and set 2.
    data : pandas.DataFrame, optional
        Frame with 'index', 'dist1' and 'dist2' columns; defaults to the
        module-level ``ms``.

    Returns
    -------
    The score ``index*(x1 + x2) / (1 + exp(dist1*x1 + dist2*x2))``.

    Notes
    -----
    ``DataFrame.ix`` was deprecated in pandas 0.20 and removed in 1.0;
    positional access now uses ``.iloc``.
    NOTE(review): when x1/x2 are pulp variables, ``np.exp`` of a linear
    expression is undefined and pulp only accepts linear objectives, so
    this nonlinear form will fail at model-build time.  The commented
    linear variant below is the solvable alternative — TODO confirm the
    intended formulation.
    """
    if data is None:
        data = ms
    index = data["index"].iloc[row]
    dist1 = data["dist1"].iloc[row]
    dist2 = data["dist2"].iloc[row]
    #score = index*(x1 + x2) + (dist1*x1 + dist2*x2)
    #score = index*(x1 + x2)/(dist1*x1 + dist2*x2)
    score = index * (x1 + x2) / (1 + np.exp(dist1 * x1 + dist2 * x2))
    return score
# Candidate row indices and the per-set selection bounds.
rowset = list(np.arange(len(ms)))
min_rows = 5
max_rows = 5

# One 0/1 indicator per row for each resource set.
x1 = LpVariable.dicts('res1', rowset, lowBound=0, upBound=1, cat=LpInteger)
x2 = LpVariable.dicts('res2', rowset, lowBound=0, upBound=1, cat=LpInteger)

# Minimisation model whose objective sums the per-row scores.
prob = LpProblem("Model", LpMinimize)
prob += lpSum(objective_function(r, x1[r], x2[r]) for r in rowset)

# Each set must switch on at least min_rows rows.
prob += lpSum(x1[r] for r in rowset) >= min_rows, "Minimum_rows_set1"
prob += lpSum(x2[r] for r in rowset) >= min_rows, "Minimum_rows_set2"

# A row may be assigned to at most one of the two sets.
for row in rowset:
    prob += x1[row] + x2[row] <= 1, "Choice_of_resources_%s" % row

# Solve the optimization problem.
prob.solve()
# Report the rows each resource set selected (indicator value truthy).
for label, choice in (("Rows for set1:", x1), ("Rows for set2:", x2)):
    print(label)
    for row in rowset:
        if choice[row].value():
            print(row)