Hi guys,
I am trying to build an ML model using PySpark, but the VectorAssembler step fails with a TypeError.
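For context, here is roughly what I am running before the failing line. This is a minimal sketch only: the feature column names come from the error message, but the sample values, the "Profit" label column, and the way I build `features` are simplified reconstructions, not my exact preprocessing.

from pyspark.sql import SparkSession
from pyspark.ml.feature import VectorAssembler

spark = SparkSession.builder.appName("startup-profit").getOrCreate()

# Single dummy row standing in for my real dataset (values are made up).
# "Profit" is a hypothetical label column; it does not appear in the error message.
df = spark.createDataFrame(
    [(100000.0, 120000.0, 300000.0, 0, 1, 150000.0)],
    ["R&D Spend", "Administration", "Marketing Spend",
     "State_Florida", "State_New York", "Profit"],
)

# At this point `features` is a DataFrame of the feature columns
# (this is exactly what the error message prints), not a list of names.
features = df.select("R&D Spend", "Administration", "Marketing Spend",
                     "State_Florida", "State_New York")

# This is the line that raises the TypeError.
assembler = VectorAssembler(inputCols=features, outputCol="features")

Running the last line gives the traceback below.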
---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
~/home/spark/spark/python/pyspark/ml/param/__init__.py in _set(self, **kwargs)
    438                 try:
--> 439                     value = p.typeConverter(value)
    440                 except TypeError as e:
~/home/spark/spark/python/pyspark/ml/param/__init__.py in toListString(value)
    156                 return [TypeConverters.toString(v) for v in value]
--> 157         raise TypeError("Could not convert %s to list of strings" % value)
    158
TypeError: Could not convert DataFrame[R&D Spend: double, Administration: double, Marketing Spend: double, State_Florida: bigint, State_New York: bigint] to list of strings

During handling of the above exception, another exception occurred:

TypeError                                 Traceback (most recent call last)
<ipython-input-46-e14adf296775> in <module>
----> 1 assembler = VectorAssembler(inputCols=features,outputCol="features")
~/home/spark/spark/python/pyspark/__init__.py in wrapper(self, *args, **kwargs)
    108             raise TypeError("Method %s forces keyword arguments." % func.__name__)
    109         self._input_kwargs = kwargs
--> 110         return func(self, **kwargs)
    111     return wrapper
    112
~/home/spark/spark/python/pyspark/ml/feature.py in __init__(self, inputCols, outputCol, handleInvalid)
   2795         self._setDefault(handleInvalid="error")
   2796         kwargs = self._input_kwargs
-> 2797         self.setParams(**kwargs)
   2798
   2799     @keyword_only
~/home/spark/spark/python/pyspark/__init__.py in wrapper(self, *args, **kwargs)
    108             raise TypeError("Method %s forces keyword arguments." % func.__name__)
    109         self._input_kwargs = kwargs
--> 110         return func(self, **kwargs)
    111     return wrapper
    112
~/home/spark/spark/python/pyspark/ml/feature.py in setParams(self, inputCols, outputCol, handleInvalid)
   2805         """
   2806         kwargs = self._input_kwargs
-> 2807         return self._set(**kwargs)
   2808
   2809
~/home/spark/spark/python/pyspark/ml/param/__init__.py in _set(self, **kwargs)
    439                     value = p.typeConverter(value)
    440                 except TypeError as e:
--> 441                     raise TypeError('Invalid param value given for param "%s". %s' % (p.name, e))
    442             self._paramMap[p] = value
    443         return self
TypeError: Invalid param value given for param "inputCols". Could not convert DataFrame[R&D Spend: double, Administration: double, Marketing Spend: double, State_Florida: bigint, State_New York: bigint] to list of strings

How can I solve this?