OK, that makes sense. I'm still having an issue, though. The VM I'm launching spark-shell from has 2 GB of memory.
I see from the UI -> cluster summary that my workers are in fact using 14 GB. However, it also seems to launch spark-shell itself with this parameter(!). My shell becomes unusable because its VM only has 2 GB of memory (see the error output at the bottom). Is this the correct behavior for spark-shell currently? If it helps, my spark-env.sh has:
export SPARK_MEM=<MEM>
where <MEM> is 2g on the master and 14g on the slaves. If I call spark-shell without specifying a SPARK_MEM, my workers only use 512 MB.
scala> [WARN] Failed to query stty columns
java.io.IOException: Cannot run program "sh": java.io.IOException: error=12, Cannot allocate memory
at java.lang.ProcessBuilder.start(ProcessBuilder.java:475)
at java.lang.Runtime.exec(Runtime.java:610)
at java.lang.Runtime.exec(Runtime.java:483)
at scala.tools.jline.internal.TerminalLineSettings.exec(TerminalLineSettings.java:178)
at scala.tools.jline.internal.TerminalLineSettings.exec(TerminalLineSettings.java:168)
at scala.tools.jline.internal.TerminalLineSettings.stty(TerminalLineSettings.java:163)
at scala.tools.jline.internal.TerminalLineSettings.get(TerminalLineSettings.java:67)
at scala.tools.jline.internal.TerminalLineSettings.getProperty(TerminalLineSettings.java:87)
at scala.tools.jline.UnixTerminal.getWidth(UnixTerminal.java:94)
at scala.tools.jline.console.ConsoleReader.drawBuffer(ConsoleReader.java:582)
at scala.tools.jline.console.ConsoleReader.drawBuffer(ConsoleReader.java:601)
at scala.tools.jline.console.ConsoleReader.putChar(ConsoleReader.java:540)
at scala.tools.jline.console.ConsoleReader.readLine(ConsoleReader.java:1430)
at scala.tools.jline.console.ConsoleReader.readLine(ConsoleReader.java:1161)
at spark.repl.SparkJLineReader.readOneLine(SparkJLineReader.scala:72)
at scala.tools.nsc.interpreter.InteractiveReader$class.readLine(InteractiveReader.scala:44)
at spark.repl.SparkJLineReader.readLine(SparkJLineReader.scala:19)
at spark.repl.SparkILoop.readOneLine$1(SparkILoop.scala:564)
at spark.repl.SparkILoop.loop(SparkILoop.scala:576)
at spark.repl.SparkILoop.process(SparkILoop.scala:879)
at spark.repl.SparkILoop.process(SparkILoop.scala:894)
at spark.repl.Main$.main(Main.scala:14)
at spark.repl.Main.main(Main.scala)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:616)
at scala.tools.nsc.util.ScalaClassLoader$$anonfun$run$1.apply(ScalaClassLoader.scala:78)
at scala.tools.nsc.util.ScalaClassLoader$class.asContext(ScalaClassLoader.scala:24)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.asContext(ScalaClassLoader.scala:88)
at scala.tools.nsc.util.ScalaClassLoader$class.run(ScalaClassLoader.scala:78)
at scala.tools.nsc.util.ScalaClassLoader$URLClassLoader.run(ScalaClassLoader.scala:101)
at scala.tools.nsc.ObjectRunner$.run(ObjectRunner.scala:33)
at scala.tools.nsc.ObjectRunner$.runAndCatch(ObjectRunner.scala:40)
at scala.tools.nsc.MainGenericRunner.runTarget$1(MainGenericRunner.scala:56)
at scala.tools.nsc.MainGenericRunner.process(MainGenericRunner.scala:80)
at scala.tools.nsc.MainGenericRunner$.main(MainGenericRunner.scala:89)
at scala.tools.nsc.MainGenericRunner.main(MainGenericRunner.scala)
Caused by: java.io.IOException: java.io.IOException: error=12, Cannot allocate memory
at java.lang.UNIXProcess.<init>(UNIXProcess.java:164)
at java.lang.ProcessImpl.start(ProcessImpl.java:81)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:468)
... 37 more