WindowsError:[Error 2]系统找不到指定的文件(Pyspark)

2024-10-03 19:22:05 发布

您现在位置:Python中文网/ 问答频道 /正文

当我尝试使用 SparkContext 时出现上述错误，详细信息如下所示。我只是想导入库并初始化上下文。同样的代码之前可以正常执行，我的电脑也没有任何改动。代码昨天还能运行，但今天重新运行时却报出了这个错误。在

我在 Win 10 上使用 Anaconda，PySpark 是通过 Anaconda 安装的。在

# Reproduction script quoted from the question: import PySpark modules and
# create a SparkContext / SQLContext.  Per the traceback below, the failure
# happens inside SparkContext.getOrCreate(), not in the import lines.
from pyspark import SparkContext, SparkConf
from pyspark.sql import SQLContext
from pyspark.sql import Row
from pyspark.sql.types import *       # for datatype conversion
from pyspark.sql.functions import *   # for col() function
from pyspark.ml.linalg import DenseVector
from pyspark.ml.feature import StandardScaler

from pyspark.ml.evaluation import BinaryClassificationEvaluator

import pandas as pd

# NOTE(review): getOrCreate() launches the JVM gateway via subprocess.Popen
# (see java_gateway.pyc frame in the traceback); WindowsError [Error 2]
# "file not found" at that point means the executable being launched —
# presumably spark-submit — could not be found.  Verify SPARK_HOME and PATH;
# an environment change since yesterday is the likely cause — TODO confirm.
sc = SparkContext.getOrCreate()
sqlCtx = SQLContext(sc)

---------------------------------------------------------------------------     
WindowsError                              Traceback (most recent call last) <ipython-input-7-3a110463c9e6> in <module>()
     22 import pandas as pd
     23 
---> 24 sc = SparkContext.getOrCreate()
     25 sqlCtx = SQLContext(sc)

C:\Users\anors\Anaconda2\lib\site-packages\pyspark\context.pyc in getOrCreate(cls, conf)
    329         with SparkContext._lock:
    330             if SparkContext._active_spark_context is None:
--> 331                 SparkContext(conf=conf or SparkConf())
    332             return SparkContext._active_spark_context
    333 

C:\Users\anors\Anaconda2\lib\site-packages\pyspark\context.pyc in
__init__(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer, conf, gateway, jsc, profiler_cls)
    113         """
    114         self._callsite = first_spark_call() or CallSite(None, None, None)
--> 115         SparkContext._ensure_initialized(self, gateway=gateway, conf=conf)
    116         try:
    117             self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,

C:\Users\anors\Anaconda2\lib\site-packages\pyspark\context.pyc in
_ensure_initialized(cls, instance, gateway, conf)
    278         with SparkContext._lock:
    279             if not SparkContext._gateway:
--> 280                 SparkContext._gateway = gateway or launch_gateway(conf)
    281                 SparkContext._jvm = SparkContext._gateway.jvm
    282 

C:\Users\anors\Anaconda2\lib\site-packages\pyspark\java_gateway.pyc in launch_gateway(conf)
     78         else:
     79             # preexec_fn not supported on Windows
---> 80             proc = Popen(command, stdin=PIPE, env=env)
     81 
     82         gateway_port = None

C:\Users\anors\Anaconda2\lib\subprocess.pyc in __init__(self, args, bufsize, executable, stdin, stdout, stderr, preexec_fn, close_fds, shell, cwd, env, universal_newlines, startupinfo, creationflags)
    388                                 p2cread, p2cwrite,
    389                                 c2pread, c2pwrite,
--> 390                                 errread, errwrite)
    391         except Exception:
    392             # Preserve original exception in case os.close raises.

C:\Users\anors\Anaconda2\lib\subprocess.pyc in _execute_child(self, args, executable, preexec_fn, close_fds, cwd, env, universal_newlines, startupinfo, creationflags, shell, to_close, p2cread, p2cwrite, c2pread, c2pwrite, errread, errwrite)
    638                                          env,
    639                                          cwd,
--> 640                                          startupinfo)
    641             except pywintypes.error, e:
    642                 # Translate pywintypes.error to WindowsError, which is

WindowsError: [Error 2] The system cannot find the file specified

Tags: in, from, import, self, none, lib, conf, context