Scalding setup:

$ git clone git://github.com/twitter/scalding.git
$ cd scalding
$ export SCALDING_HOME=`pwd`
$ sbt update
$ sbt test
$ sbt assembly
com.hadoop.gplcompression#hadoop-lzo;0.4.19: not found
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: UNRESOLVED DEPENDENCIES ::
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[warn] :: com.hadoop.gplcompression#hadoop-lzo;0.4.19: not found
[warn] ::::::::::::::::::::::::::::::::::::::::::::::
[info] Updating {file:/home/abc/scala/scalding/}scalding-macros...
[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
sbt.ResolveException: unresolved dependency: com.hadoop.gplcompression#hadoop-lzo;0.4.19: not found
at sbt.IvyActions$.sbt$IvyActions$$resolve(IvyActions.scala:217)
at sbt.IvyActions$$anonfun$update$1.apply(IvyActions.scala:126)
at sbt.IvyActions$$anonfun$update$1.apply(IvyActions.scala:125)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:115)
at sbt.IvySbt$Module$$anonfun$withModule$1.apply(Ivy.scala:115)
at sbt.IvySbt$$anonfun$withIvy$1.apply(Ivy.scala:103)
at sbt.IvySbt.sbt$IvySbt$$action$1(Ivy.scala:48)
at sbt.IvySbt$$anon$3.call(Ivy.scala:57)
at xsbt.boot.Locks$GlobalLock.withChannel$1(Locks.scala:98)
at xsbt.boot.Locks$GlobalLock.xsbt$boot$Locks$GlobalLock$$withChannelRetries$1(Locks.scala:81)
at xsbt.boot.Locks$GlobalLock$$anonfun$withFileLock$1.apply(Locks.scala:102)
at xsbt.boot.Using$.withResource(Using.scala:11)
at xsbt.boot.Using$.apply(Using.scala:10)
at xsbt.boot.Locks$GlobalLock.ignoringDeadlockAvoided(Locks.scala:62)
at xsbt.boot.Locks$GlobalLock.withLock(Locks.scala:52)
at xsbt.boot.Locks$.apply0(Locks.scala:31)
at xsbt.boot.Locks$.apply(Locks.scala:28)
at sbt.IvySbt.withDefaultLogger(Ivy.scala:57)
at sbt.IvySbt.withIvy(Ivy.scala:98)
at sbt.IvySbt.withIvy(Ivy.scala:94)
at sbt.IvySbt$Module.withModule(Ivy.scala:115)
at sbt.IvyActions$.update(IvyActions.scala:125)
at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1223)
at sbt.Classpaths$$anonfun$sbt$Classpaths$$work$1$1.apply(Defaults.scala:1221)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$74.apply(Defaults.scala:1244)
at sbt.Classpaths$$anonfun$doWork$1$1$$anonfun$74.apply(Defaults.scala:1242)
at sbt.Tracked$$anonfun$lastOutput$1.apply(Tracked.scala:35)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1246)
at sbt.Classpaths$$anonfun$doWork$1$1.apply(Defaults.scala:1241)
at sbt.Tracked$$anonfun$inputChanged$1.apply(Tracked.scala:45)
at sbt.Classpaths$.cachedUpdate(Defaults.scala:1249)
at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1214)
at sbt.Classpaths$$anonfun$updateTask$1.apply(Defaults.scala:1192)
at scala.Function1$$anonfun$compose$1.apply(Function1.scala:47)
at sbt.$tilde$greater$$anonfun$$u2219$1.apply(TypeFunctions.scala:42)
at sbt.std.Transform$$anon$4.work(System.scala:64)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1$$anonfun$apply$1.apply(Execute.scala:237)
at sbt.ErrorHandling$.wideConvert(ErrorHandling.scala:18)
at sbt.Execute.work(Execute.scala:244)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.Execute$$anonfun$submit$1.apply(Execute.scala:237)
at sbt.ConcurrentRestrictions$$anon$4$$anonfun$1.apply(ConcurrentRestrictions.scala:160)
at sbt.CompletionService$$anon$2.call(CompletionService.scala:30)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
at java.util.concurrent.FutureTask.run(FutureTask.java:262)
at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
at java.lang.Thread.run(Thread.java:745)
[error] (scalding-commons/*:update) sbt.ResolveException: unresolved dependency: com.hadoop.gplcompression#hadoop-lzo;0.4.19: not found
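hadoop-lzo is not published to Maven Central, so the dependency declared in the Scalding build (roughly the form below; the exact build file and syntax may differ) cannot be resolved with the default resolvers alone:

libraryDependencies += "com.hadoop.gplcompression" % "hadoop-lzo" % "0.4.19"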
The fix is to add extra resolvers to the build definition under the project directory:

$ cd $SCALDING_HOME/project

Add the following to the build file:

resolvers ++= Seq(
  "Local Maven Repository" at "file://" + Path.userHome.absolutePath + "/.m2/repository",
  "snapshots" at "https://oss.sonatype.org/content/repositories/snapshots",
  "releases" at "https://oss.sonatype.org/content/repositories/releases",
  "Concurrent Maven Repo" at "http://conjars.org/repo",
  "Clojars Repository" at "http://clojars.org/repo",
  "Twitter Maven" at "http://maven.twttr.com",
  "test" at "http://repo.typesafe.com/typesafe/releases",
  "Cloudera" at "https://repository.cloudera.com/artifactory/cloudera-repos/"
),

Re-running sbt update then resolves and downloads hadoop-lzo:
[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] downloading http://repo.typesafe.com/typesafe/releases/com/hadoop/gplcompression/hadoop-lzo/0.4.19/hadoop-lzo-0.4.19.jar ...
[info] [SUCCESSFUL ] com.hadoop.gplcompression#hadoop-lzo;0.4.19!hadoop-lzo.jar (2311ms)
[info] Done updating.
[info] Updating {file:/home/abc/scala/scalding/}scalding-macros...
[info] Resolving org.fusesource.jansi#jansi;1.4 ...
[info] Done updating.
[success] Total time: 160 s, completed Apr 21, 2015 7:09:36 PM
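With the build working, a quick sanity check is a minimal word-count job. The sketch below is loosely adapted from the WordCountJob shipped with the Scalding examples; the class name and the input/output arguments are placeholders:

import com.twitter.scalding._

// Minimal typed-API word count: read lines, split into words,
// count occurrences per word, write (word, count) pairs as TSV.
class MyWordCountJob(args: Args) extends Job(args) {
  TypedPipe.from(TextLine(args("input")))
    .flatMap { line => line.toLowerCase.split("\\W+").filter(_.nonEmpty) }
    .groupBy { word => word }
    .size
    .write(TypedTsv[(String, Long)](args("output")))
}

One way to run it locally might be via the bundled scripts/scald.rb (the file names here are placeholders):

$ scripts/scald.rb --local MyWordCountJob.scala --input input.txt --output counts.tsv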
Issue log:
sbt test does not pass: scalding-hadoop-test:test fails.
Workaround: comment out every class that extends WordSpec under the scalding-hadoop-test folder, then re-run sbt test.
Example:
// class WordCountTest extends WordSpec with Matchers {
//   "A WordCount job" should {
//     JobTest(new com.twitter.scalding.examples.WordCountJob(_))
//       .arg("input", "inputFile")
//       .arg("output", "outputFile")
//       .source(TextLine("inputFile"), List((0, "hack hack hack and hack")))
//       .sink[(String, Long)](TypedTsv[(String, Long)]("outputFile")) { outputBuffer =>
//         val outMap = outputBuffer.toMap
//         "count words correctly" in {
//           outMap("hack") shouldBe 4
//           outMap("and") shouldBe 1
//         }
//       }
//       .run
//       .finish
//   }
// }
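As a lighter-weight alternative to commenting out the specs, sbt can run tests per subproject, so the failing scalding-hadoop-test module can simply be skipped. A sketch, assuming the standard subproject names from the build (scalding-commons appears in the error above; scalding-core is the core module):

$ sbt "scalding-core/test"
$ sbt "scalding-commons/test"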