I don't know why this code has "Runtime Error", please help me.

The detail information about the error is empty, so I don't know what problem it is.

My idea is as follows. Get every two point and calculate the line, then use dict to store the index of point for each same line. I use the set to avoid the same index.

The line is represented by y = a1 / a2 * x + b1 / b2. I use fraction to avoid the precision problem.

Here is my code. Thank you

```
# Definition for a point
# class Point:
# def __init__(self, a=0, b=0):
# self.x = a
# self.y = b
# y = a1/a2 * x + b1/b2 or x = c
# c to label whether it is vertical line
class Solution:
def gcd(self, a, b):
return (a if b == 0 else self.gcd(b, a%b))
def parameters(self, pt1, pt2):
if pt1.x == pt2.x:
c = pt1.x
return (0,0,0,0,c)
else:
a1, a2 = pt1.y-pt2.y, pt1.x-pt2.x
gcd = self.gcd(a1,a2)
a1, a2 = a1 / gcd, a2 / gcd
b1, b2 = a2*pt1.y - a1*pt1.x, a2
gcd = self.gcd(b1,b2)
b1, b2 = b1 / gcd, b2 / gcd
return (a1,a2,b1,b2,0)
# @param points, a list of Points
# @return an integer
def maxPoints(self, points):
if len(points) == 1:
return 1
lineDict = dict()
for i in xrange(len(points)):
for j in xrange(i+1,len(points)):
pt1,pt2 = points[i],points[j]
params = self.parameters(pt1, pt2)
if params not in lineDict:
lineDict[params] = set()
lineDict[params].add(i)
lineDict[params].add(j)
maxSum = 0
for (key,value) in lineDict.items():
if len(value) > maxSum:
maxSum = len(value)
return maxSum
```