Unfortunately, Hadoop's dependency on Guava 11 (which lacks the Futures.withFallback method) is a long-standing problem, and indeed Hadoop 2.7.1 still depends on Guava 11.
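If you are unsure which Guava actually wins on your driver's classpath, a quick reflection check from spark-shell can tell you. This is only a sketch, not part of the original answer; the jar path in the comment is illustrative:

// Which jar supplies com.google.common.util.concurrent.Futures, and does it have withFallback?
val futures = Class.forName("com.google.common.util.concurrent.Futures")
println(futures.getProtectionDomain.getCodeSource.getLocation)   // e.g. .../guava-11.0.2.jar from Hadoop
println(futures.getMethods.exists(_.getName == "withFallback"))  // false on Guava 11, true from 14 onwards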
Spark core itself depends on Guava 14, as can be seen here, but it sidesteps the conflict by shading its copy of Guava inside the Spark assembly:
$ jar tf /usr/lib/spark/lib/spark-assembly.jar | grep concurrent.Futures
org/spark-project/guava/util/concurrent/Futures$1.class
org/spark-project/guava/util/concurrent/Futures$2.class
org/spark-project/guava/util/concurrent/Futures$3.class
org/spark-project/guava/util/concurrent/Futures$4.class
org/spark-project/guava/util/concurrent/Futures$5.class
org/spark-project/guava/util/concurrent/Futures$6.class
org/spark-project/guava/util/concurrent/Futures$ChainingListenableFuture$1.class
org/spark-project/guava/util/concurrent/Futures$ChainingListenableFuture.class
org/spark-project/guava/util/concurrent/Futures$CombinedFuture$1.class
org/spark-project/guava/util/concurrent/Futures$CombinedFuture$2.class
org/spark-project/guava/util/concurrent/Futures$CombinedFuture.class
org/spark-project/guava/util/concurrent/Futures$FallbackFuture$1$1.class
org/spark-project/guava/util/concurrent/Futures$FallbackFuture$1.class
org/spark-project/guava/util/concurrent/Futures$FallbackFuture.class
org/spark-project/guava/util/concurrent/Futures$FutureCombiner.class
org/spark-project/guava/util/concurrent/Futures$ImmediateCancelledFuture.class
org/spark-project/guava/util/concurrent/Futures$ImmediateFailedCheckedFuture.class
org/spark-project/guava/util/concurrent/Futures$ImmediateFailedFuture.class
org/spark-project/guava/util/concurrent/Futures$ImmediateFuture.class
org/spark-project/guava/util/concurrent/Futures$ImmediateSuccessfulCheckedFuture.class
org/spark-project/guava/util/concurrent/Futures$ImmediateSuccessfulFuture.class
org/spark-project/guava/util/concurrent/Futures$MappingCheckedFuture.class
org/spark-project/guava/util/concurrent/Futures.class

$ javap -cp /usr/lib/spark/lib/spark-assembly.jar org.spark-project.guava.util.concurrent.Futures
Compiled from "Futures.java"
public final class org.spark-project.guava.util.concurrent.Futures {
  public static <V, X extends java.lang.Exception> org.spark-project.guava.util.concurrent.CheckedFuture<V, X> makeChecked(org.spark-project.guava.util.concurrent.ListenableFuture<V>, com.google.common.base.Function<java.lang.Exception, X>);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> immediateFuture(V);
  public static <V, X extends java.lang.Exception> org.spark-project.guava.util.concurrent.CheckedFuture<V, X> immediateCheckedFuture(V);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> immediateFailedFuture(java.lang.Throwable);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> immediateCancelledFuture();
  public static <V, X extends java.lang.Exception> org.spark-project.guava.util.concurrent.CheckedFuture<V, X> immediateFailedCheckedFuture(X);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> withFallback(org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>, org.spark-project.guava.util.concurrent.FutureFallback<? extends V>);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> withFallback(org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>, org.spark-project.guava.util.concurrent.FutureFallback<? extends V>, java.util.concurrent.Executor);
  public static <I, O> org.spark-project.guava.util.concurrent.ListenableFuture<O> transform(org.spark-project.guava.util.concurrent.ListenableFuture<I>, org.spark-project.guava.util.concurrent.AsyncFunction<? super I, ? extends O>);
  public static <I, O> org.spark-project.guava.util.concurrent.ListenableFuture<O> transform(org.spark-project.guava.util.concurrent.ListenableFuture<I>, org.spark-project.guava.util.concurrent.AsyncFunction<? super I, ? extends O>, java.util.concurrent.Executor);
  public static <I, O> org.spark-project.guava.util.concurrent.ListenableFuture<O> transform(org.spark-project.guava.util.concurrent.ListenableFuture<I>, com.google.common.base.Function<? super I, ? extends O>);
  public static <I, O> org.spark-project.guava.util.concurrent.ListenableFuture<O> transform(org.spark-project.guava.util.concurrent.ListenableFuture<I>, com.google.common.base.Function<? super I, ? extends O>, java.util.concurrent.Executor);
  public static <I, O> java.util.concurrent.Future<O> lazyTransform(java.util.concurrent.Future<I>, com.google.common.base.Function<? super I, ? extends O>);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<V> dereference(org.spark-project.guava.util.concurrent.ListenableFuture<? extends org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>>);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<java.util.List<V>> allAsList(org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>...);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<java.util.List<V>> allAsList(java.lang.Iterable<? extends org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>>);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<java.util.List<V>> successfulAsList(org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>...);
  public static <V> org.spark-project.guava.util.concurrent.ListenableFuture<java.util.List<V>> successfulAsList(java.lang.Iterable<? extends org.spark-project.guava.util.concurrent.ListenableFuture<? extends V>>);
  public static <V> void addCallback(org.spark-project.guava.util.concurrent.ListenableFuture<V>, org.spark-project.guava.util.concurrent.FutureCallback<? super V>);
  public static <V> void addCallback(org.spark-project.guava.util.concurrent.ListenableFuture<V>, org.spark-project.guava.util.concurrent.FutureCallback<? super V>, java.util.concurrent.Executor);
  public static <V, X extends java.lang.Exception> V get(java.util.concurrent.Future<V>, java.lang.Class<X>) throws X;
  public static <V, X extends java.lang.Exception> V get(java.util.concurrent.Future<V>, long, java.util.concurrent.TimeUnit, java.lang.Class<X>) throws X;
  public static <V> V getUnchecked(java.util.concurrent.Future<V>);
  static {};
}
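In other words, the shaded copy inside the assembly does expose withFallback, but it lives under org.spark-project.guava, so it cannot satisfy code (such as the Cassandra driver) compiled against com.google.common.util.concurrent.Futures. A quick spark-shell check, again just an illustration:

// The relocated copy has withFallback...
val shaded = Class.forName("org.spark-project.guava.util.concurrent.Futures")
println(shaded.getMethods.exists(_.getName == "withFallback"))           // true
// ...but it is not the class that code compiled against com.google.common sees
println(shaded == Class.forName("com.google.common.util.concurrent.Futures"))  // false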
You can follow the instructions here https://arjon.es/2015/making-hadoop-2.6-spark-cassandra-driver-play-nice-together/ to do the shading yourself at compile time as well. With spark-shell, you can work around the problem with some changes to spark.driver.extraClassPath, as mentioned here, although it may still collide at various points.
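If you build your job with sbt rather than Maven, the same idea (relocating the Guava your application and the Cassandra driver need, so Hadoop's Guava 11 can stay where it is) can be sketched with sbt-assembly's shade rules. This is only an illustration, assuming sbt-assembly 0.14+ is already set up; the target package name is made up:

// build.sbt (sketch): relocate the Guava bundled into the application fat jar so it
// cannot clash with the Guava 11 that Hadoop puts on the runtime classpath
assemblyShadeRules in assembly := Seq(
  ShadeRule.rename("com.google.common.**" -> "shaded.guava.@1").inAll
)

With a relocation like this in place, the Cassandra driver's withFallback calls resolve against the Guava packaged inside your own jar, and whichever Guava Hadoop and Spark put on the classpath no longer matters to your application.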