scala - sbt assembly merge issue [deduplicate: different file contents found in the following]

Tags: scala apache-spark sbt sbt-assembly sbt-plugin

I have followed the other sbt-assembly merge questions on Stack Overflow and added a merge strategy, but the problem is still not resolved. I added the dependency-graph plugin, but it does not show the dependencies pulled in transitively by my libraries. I am already using the latest sbt-assembly merge strategy, yet this duplicate-content problem persists.
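
(For context: the sbt-dependency-graph plugin declared in the plugins file below does expose the transitive tree from the sbt shell. A minimal sketch, assuming plugin version 0.9.0 as declared there; the coordinates are copied from the error output further down and may need adjusting:)

// from the sbt shell: print the full transitive dependency tree,
// then walk back from one of the conflicting artifacts to see what pulls it in
> dependencyTree
> whatDependsOn org.glassfish.hk2.external javax.inject 2.4.0-b34
> whatDependsOn aopalliance aopalliance 1.0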

build.sbt:-

import sbtassembly.Log4j2MergeStrategy

name := ""
organization := "" // change to your org
version := "0.1"

scalaVersion := "2.11.8"
val sparkVersion = "2.1.1"

resolvers += "jitpack" at "https://jitpack.io"
resolvers += "bintray-spark-packages" at "https://dl.bintray.com/spark-packages/maven/"

resolvers += Resolver.url("artifactory", url("http://scalasbt.artifactoryonline.com/scalasbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)

resolvers += Resolver.url("bintray-sbt-plugins", url("http://dl.bintray.com/sbt/sbt-plugin-releases"))(Resolver.ivyStylePatterns)

resolvers += Resolver.typesafeRepo("releases")

//addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.6")



libraryDependencies ++= Seq(
  ("org.apache.spark" %% "spark-core" % "2.1.1" %"provided").
    exclude("commons-beanutils", "commons-beanutils-core").
    exclude("commons-collections", "commons-collections").
    exclude("commons-logging", "commons-logging").
    exclude("com.esotericsoftware.minlog", "minlog"),
  ("org.apache.spark" %% "spark-hive" % "2.1.1" %"provided").
    exclude("commons-beanutils", "commons-beanutils-core").
    exclude("commons-collections", "commons-collections").
    exclude("commons-logging", "commons-logging").
    exclude("com.esotericsoftware.minlog", "minlog"),
  ("org.apache.spark" %% "spark-sql" % "2.1.1" % "provided").
    exclude("org.mortbay.jetty", "servlet-api").
    exclude("commons-beanutils", "commons-beanutils-core").
    exclude("commons-collections", "commons-collections").
    exclude("commons-logging", "commons-logging").
    exclude("com.esotericsoftware.minlog", "minlog"),
  //"com.databricks" %% "spark-avro" % "3.2.0",
  //"org.json4s" %% "json4s-native" % "3.5.3",
  //"org.scala-lang" % "scala-library" % "2.11.8" % "provided",
  //"org.scala-lang" % "scala-compiler" % "2.11.8" % "provided",
  //"org.slf4j" % "slf4j-log4j12" % "1.7.10",
  // spark-modules
  // "org.apache.spark" %% "spark-graphx" % "1.6.0",
  // "org.apache.spark" %% "spark-mllib" % "1.6.0",
  // "org.apache.spark" %% "spark-streaming" % "1.6.0",

  // spark packages
 // "com.databricks" %% "spark-csv" % "1.3.0",

  // testing
//  "org.scalatest"   %% "scalatest"    % "2.2.4"   % "test,it",
 // "org.scalacheck"  %% "scalacheck"   % "1.12.2"      % "test,it"

  // logging
 // "org.apache.logging.log4j" % "log4j-api" % "2.4.1" exclude("javax.jms", "jms"),
  //"org.apache.logging.log4j" % "log4j-core" % "2.4.1" exclude("javax.jms", "jms"),

  "com.typesafe.config" % "config" % "0.3.0",

  "com.lihaoyi" %% "utest" % "0.6.3" % "test",
  "org.scalatest" %% "scalatest" % "3.0.1" % "test"
  // "com.github.mrpowers" % "spark-daria" % "v2.3.0_0.18.0" % "test"


)

testFrameworks += new TestFramework("utest.runner.Framework")

fork in run := true
javaOptions in run ++= Seq(
  "-Dlog4j.debug=true",
  "-Dlog4j.configuration=log4j.properties")
outputStrategy := Some(StdoutOutput)


fork in Test := true
javaOptions ++= Seq("-Xms512M", "-Xmx2048M", "-XX:+CMSClassUnloadingEnabled","-Duser.timezone=UTC")


//addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.3")

exportJars := true
/* without this explicit merge strategy code you get a lot of noise from sbt-assembly
   complaining about not being able to dedup files */
assemblyMergeStrategy in assembly := {
  case PathList("org", "apache", "hadoop", "yarn", "factories", "package-info.class")         => MergeStrategy.discard
  case PathList("org", "apache", "hadoop", "yarn", "provider", "package-info.class")         => MergeStrategy.discard
  case PathList("org", "apache", "hadoop", "util", "provider", "package-info.class")         => MergeStrategy.discard
  case PathList("org", "apache", "spark", "unused", "UnusedStubClass.class") => MergeStrategy.first
  case PathList("org", "slf4j", xs@_*) => MergeStrategy.last
  case PathList(ps @ _*) if ps.last == "Log4j2Plugins.dat" => Log4j2MergeStrategy.plugincache
  case PathList("javax", "servlet", xs @ _*) => MergeStrategy.last
  case PathList("javax", "activation", xs @ _*) => MergeStrategy.last
  case PathList("org", "apache", xs @ _*) => MergeStrategy.last
  case PathList("com", "google", xs @ _*) => MergeStrategy.last
  case PathList("com", "esotericsoftware", xs @ _*) => MergeStrategy.last
  case PathList("com", "codahale", xs @ _*) => MergeStrategy.last
  case PathList("com", "yammer", xs @ _*) => MergeStrategy.last
  case "about.html" => MergeStrategy.rename
  case "META-INF/ECLIPSEF.RSA" => MergeStrategy.last
  case "META-INF/mailcap" => MergeStrategy.last
  case "META-INF/mimetypes.default" => MergeStrategy.last
  case "plugin.properties" => MergeStrategy.last
  case "log4j.properties" => MergeStrategy.last
  case x =>
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(x)
}

assemblyExcludedJars in assembly := {
  val cp = (fullClasspath in assembly).value
  cp filter { f =>
    f.data.getName.contains("spark") ||
      f.data.getName == "spark-core_2.11-2.0.1.jar"
  }
}

filterScalaLibrary := false

/* including scala bloats your assembly jar unnecessarily, and may interfere with
   spark runtime */
assemblyOption in assembly := (assemblyOption in assembly).value.copy(includeScala = false)

/* you need to be able to undo the "provided" annotation on the deps when running your spark
   programs locally i.e. from sbt; this bit reincludes the full classpaths in the compile and run tasks. */
fullClasspath in Runtime := (fullClasspath in (Compile, run)).value


//mainClass in Compile := Some("com.dsp.preprocessingMain")
//mainClass in(Compile, run) := Some("com.dsp.preprocessingMain")
//mainClass in(Compile, packageBin) := Some("com.honeywell.dsp.preprocessingMain")
mainClass in assembly := Some("com.dsp.preprocessingMain")

project/plugin.sbt:-

logLevel := Level.Warn
resolvers += Resolver.url(
  "idio",
  url("http://dl.bintray.com/idio/sbt-plugins")
)(Resolver.ivyStylePatterns)

//resolvers += Resolver.typesafeRepo("releases")
resolvers += "bintray-spark-packages" at "https://dl.bintray.com/spark-packages/maven/"
resolvers += Resolver.url("bintray-sbt-plugins", url("https://dl.bintray.com/eed3si9n/sbt-plugins/"))
addSbtPlugin("net.virtual-void" % "sbt-dependency-graph" % "0.9.0")
addSbtPlugin("org.idio" % "sbt-assembly-log4j2" % "0.1.0")
//addSbtPlugin("org.spark-packages" % "sbt-spark-package" % "0.2.6")
//addSbtPlugin("org.scalariform" % "sbt-scalariform" % "1.6.0")
addSbtPlugin("com.eed3si9n" % "sbt-assembly" % "0.14.5")
addSbtPlugin("org.scala-sbt" % "sbt-duplicates-finder" % "0.7.0")

Error:- sbt assembly

    [warn] Merging 'org\apache\log4j\xml\log4j.dtd' with strategy 'last'
    [error] 18 errors were encountered during merge
    java.lang.RuntimeException: deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Inject.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Inject.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Named.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Named.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Provider.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Provider.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Qualifier.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Qualifier.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Scope.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Scope.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Singleton.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Singleton.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/aop/Advice.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/aop/Advice.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/aop/AspectException.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/aop/AspectException.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/ConstructorInterceptor.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/ConstructorInterceptor.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/ConstructorInvocation.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/ConstructorInvocation.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Interceptor.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Interceptor.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Invocation.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Invocation.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Joinpoint.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Joinpoint.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/MethodInterceptor.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/MethodInterceptor.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/MethodInvocation.class
    C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/MethodInvocation.class
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\commons-beanutils\commons-beanutils\jars\commons-beanutils-1.7.0.jar:overview.html
    C:\Users\h217119\.ivy2\cache\org.codehaus.janino\janino\jars\janino-3.0.0.jar:overview.html
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\com.twitter\parquet-hadoop-bundle\jars\parquet-hadoop-bundle-1.6.0.jar:parquet.thrift
    C:\Users\h217119\.ivy2\cache\org.apache.parquet\parquet-format\jars\parquet-format-2.3.0-incubating.jar:parquet.thrift
    deduplicate: different file contents found in the following:
    C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-core\jars\datanucleus-core-3.2.10.jar:plugin.xml
    C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-api-jdo\jars\datanucleus-api-jdo-3.2.6.jar:plugin.xml
    C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-rdbms\jars\datanucleus-rdbms-3.2.9.jar:plugin.xml
            at sbtassembly.Assembly$.applyStrategies(Assembly.scala:140)
            at sbtassembly.Assembly$.x$1$lzycompute$1(Assembly.scala:25)
            at sbtassembly.Assembly$.x$1$1(Assembly.scala:23)
            at sbtassembly.Assembly$.stratMapping$lzycompute$1(Assembly.scala:23)
            at sbtassembly.Assembly$.stratMapping$1(Assembly.scala:23)
            at sbtassembly.Assembly$.inputs$lzycompute$1(Assembly.scala:67)
            at sbtassembly.Assembly$.inputs$1(Assembly.scala:57)
            at sbtassembly.Assembly$.apply(Assembly.scala:83)
            at sbtassembly.Assembly$$anonfun$assemblyTask$1.apply(Assembly.scala:245)
            at sbtassembly.Assembly$$anonfun$assemblyTask$1.apply(Assembly.scala:242)
            at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
            at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:40)
            at sbt.std.Transform$$anon$4.work(System.scala:63)
            at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
            at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:228)
            at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:17)
            at sbt.Execute.work(Execute.scala:237)
            at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
            at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:228)
            at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:159)
            at sbt.CompletionService$$anon$2.call(CompletionService.scala:28)
            at java.util.concurrent.FutureTask.run(FutureTask.java:266)
            at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
            at java.util.concurrent.FutureTask.run(FutureTask.java:266)
            at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
            at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
            at java.lang.Thread.run(Thread.java:748)
    [error] (*:assembly) deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Inject.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Inject.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Named.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Named.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Provider.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Provider.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Qualifier.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Qualifier.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Scope.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Scope.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\javax.inject\javax.inject\jars\javax.inject-1.jar:javax/inject/Singleton.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\javax.inject\jars\javax.inject-2.4.0-b34.jar:javax/inject/Singleton.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/aop/Advice.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/aop/Advice.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/aop/AspectException.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/aop/AspectException.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/ConstructorInterceptor.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/ConstructorInterceptor.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/ConstructorInvocation.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/ConstructorInvocation.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Interceptor.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Interceptor.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Invocation.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Invocation.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/Joinpoint.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/Joinpoint.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/MethodInterceptor.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/MethodInterceptor.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\aopalliance\aopalliance\jars\aopalliance-1.0.jar:org/aopalliance/intercept/MethodInvocation.class
    [error] C:\Users\h217119\.ivy2\cache\org.glassfish.hk2.external\aopalliance-repackaged\jars\aopalliance-repackaged-2.4.0-b34.jar:org/aopalliance/intercept/MethodInvocation.class
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\commons-beanutils\commons-beanutils\jars\commons-beanutils-1.7.0.jar:overview.html
    [error] C:\Users\h217119\.ivy2\cache\org.codehaus.janino\janino\jars\janino-3.0.0.jar:overview.html
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\com.twitter\parquet-hadoop-bundle\jars\parquet-hadoop-bundle-1.6.0.jar:parquet.thrift
    [error] C:\Users\h217119\.ivy2\cache\org.apache.parquet\parquet-format\jars\parquet-format-2.3.0-incubating.jar:parquet.thrift
    [error] deduplicate: different file contents found in the following:
    [error] C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-core\jars\datanucleus-core-3.2.10.jar:plugin.xml
    [error] C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-api-jdo\jars\datanucleus-api-jdo-3.2.6.jar:plugin.xml
    [error] C:\Users\h217119\.ivy2\cache\org.datanucleus\datanucleus-rdbms\jars\datanucleus-rdbms-3.2.9.jar:plugin.xml
    [error] Total time: 42 s, completed Apr 24, 2018 12:24:13 AM

Best Answer

I tried the merge strategies as described in the sbt-assembly documentation, but they still left some duplicate-resource errors, so following other Stack Overflow questions I settled on the strategy below, which discards every duplicate under META-INF.

assemblyMergeStrategy in assembly := {
  case PathList("META-INF", xs @ _*) => MergeStrategy.discard
  case x => MergeStrategy.first
}
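
For reference, a narrower variant is also possible: keep the original strategy and add cases only for the entries that actually collide in the error output above. This is a sketch only, not the accepted answer; the patterns simply mirror the conflicting paths reported by sbt-assembly (javax/inject, org/aopalliance, overview.html, parquet.thrift, plugin.xml):

assemblyMergeStrategy in assembly := {
  // classes duplicated between javax.inject-1.jar and the Glassfish HK2 repackaging
  case PathList("javax", "inject", xs @ _*)    => MergeStrategy.first
  // classes duplicated between aopalliance-1.0.jar and aopalliance-repackaged
  case PathList("org", "aopalliance", xs @ _*) => MergeStrategy.first
  // javadoc overview pages carry no runtime meaning
  case "overview.html"                         => MergeStrategy.discard
  // parquet.thrift ships in both parquet-hadoop-bundle and parquet-format
  case "parquet.thrift"                        => MergeStrategy.first
  // plugin.xml collides across the DataNucleus jars; keeping one copy silences
  // the error, though DataNucleus may need a proper merge if Hive is bundled
  case "plugin.xml"                            => MergeStrategy.first
  case x =>
    val oldStrategy = (assemblyMergeStrategy in assembly).value
    oldStrategy(x)
}

Either way, the duplicates come from the same classes being shipped twice (once standalone, once repackaged by HK2), so excluding one of the two conflicting artifacts from the dependency graph would likely also resolve the error.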

Regarding "scala - sbt assembly merge issue [deduplicate: different file contents found in the following]", a similar question can be found on Stack Overflow: https://stackoverflow.com/questions/49992899/
