IntelliJ IDEA - MRUnit test method execution error: HADOOP_HOME or hadoop.home.dir are not set
I want to test Hadoop 2 MapReduce code with MRUnit, but I get an error. Hadoop is not installed on my local Windows machine. The pom.xml is below:
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-core</artifactId>
    <version>2.6.0-mr1-cdh5.4.2</version>
</dependency>
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.6.0-cdh5.4.2</version>
</dependency>
<dependency>
    <groupId>org.apache.mrunit</groupId>
    <artifactId>mrunit</artifactId>
    <version>1.1.0</version>
    <classifier>hadoop2</classifier>
    <scope>test</scope>
</dependency>
<dependency>
    <groupId>org.mockito</groupId>
    <artifactId>mockito-all</artifactId>
    <version>1.9.5</version>
    <scope>test</scope>
</dependency>
The Mapper code is below:
package com.hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

/**
 * Created by zhu on 2015/6/3.
 */
public class SMSCDRMapper extends Mapper<LongWritable, Text, Text, IntWritable> {

    private Text status = new Text();
    private final static IntWritable addOne = new IntWritable(1);

    /**
     * Returns the SMS status code and its count.
     */
    protected void map(LongWritable key, Text value, Context context)
            throws java.io.IOException, InterruptedException {
        // 655209;1;796764372490213;804422938115889;6 is the sample record format
        String[] line = value.toString().split(";");
        // Check if the record is an SMS CDR
        if (Integer.parseInt(line[1]) == 1) {
            status.set(line[4]);
            context.write(status, addOne);
        }
    }
}
The Reducer code is below:
package com.hadoop;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

/**
 * Created by zhu on 2015/6/3.
 */
public class SMSCDRReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws java.io.IOException, InterruptedException {
        // Sum all counts for this status code
        int sum = 0;
        for (IntWritable value : values) {
            sum += value.get();
        }
        context.write(key, new IntWritable(sum));
    }
}
The MRUnit test code is below:
package com.hadoop;

/**
 * Created by zhu on 2015/6/3.
 */
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mrunit.mapreduce.MapDriver;
import org.apache.hadoop.mrunit.mapreduce.MapReduceDriver;
import org.apache.hadoop.mrunit.mapreduce.ReduceDriver;
import org.junit.Before;
import org.junit.Test;

public class SMSCDRMapperReducerTest {

    MapDriver<LongWritable, Text, Text, IntWritable> mapDriver;
    ReduceDriver<Text, IntWritable, Text, IntWritable> reduceDriver;
    MapReduceDriver<LongWritable, Text, Text, IntWritable, Text, IntWritable> mapReduceDriver;
    Configuration conf = new Configuration();

    @Before
    public void setUp() {
        SMSCDRMapper mapper = new SMSCDRMapper();
        SMSCDRReducer reducer = new SMSCDRReducer();
        mapDriver = MapDriver.newMapDriver(mapper);
        reduceDriver = ReduceDriver.newReduceDriver(reducer);
        mapReduceDriver = MapReduceDriver.newMapReduceDriver(mapper, reducer);
        mapDriver.setConfiguration(conf);
        conf.set("myparameter1", "20");
        conf.set("myparameter2", "23");
    }

    @Test
    public void testMapper() {
        mapDriver.withInput(new LongWritable(), new Text(
                "655209;1;796764372490213;804422938115889;6"));
        mapDriver.withOutput(new Text("6"), new IntWritable(1));
        try {
            mapDriver.runTest();
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    @Test
    public void testReducer() throws IOException {
        List<IntWritable> values = new ArrayList<IntWritable>();
        values.add(new IntWritable(1));
        values.add(new IntWritable(1));
        reduceDriver.withInput(new Text("6"), values);
        reduceDriver.withOutput(new Text("6"), new IntWritable(2));
        reduceDriver.runTest();
    }

    @Test
    public void testMapReduce() throws IOException {
        mapReduceDriver.withInput(new LongWritable(), new Text(
                "655209;1;796764372490213;804422938115889;6"));
        List<IntWritable> values = new ArrayList<IntWritable>();
        values.add(new IntWritable(1));
        values.add(new IntWritable(1));
        mapReduceDriver.withOutput(new Text("6"), new IntWritable(2));
        mapReduceDriver.runTest();
    }
}
Executing the test methods produces the error below. Does using MRUnit require Hadoop to be installed on the local machine?
16:03:40,383 DEBUG Shell - Failed to detect a valid hadoop home directory
java.io.IOException: HADOOP_HOME or hadoop.home.dir are not set.
    at org.apache.hadoop.util.Shell.checkHadoopHome(Shell.java:302)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:327)
    at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
    at org.apache.hadoop.conf.Configuration.getStrings(Configuration.java:1767)
    at org.apache.hadoop.io.serializer.SerializationFactory.<init>(SerializationFactory.java:58)
    at org.apache.hadoop.mrunit.internal.io.Serialization.<init>(Serialization.java:39)
    at org.apache.hadoop.mrunit.TestDriver.getSerialization(TestDriver.java:530)
    at org.apache.hadoop.mrunit.TestDriver.copy(TestDriver.java:675)
    at org.apache.hadoop.mrunit.TestDriver.copyPair(TestDriver.java:679)
    at org.apache.hadoop.mrunit.MapReduceDriverBase.addInput(MapReduceDriverBase.java:66)
    at org.apache.hadoop.mrunit.MapReduceDriverBase.withInput(MapReduceDriverBase.java:119)
    at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:62)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:601)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:45)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:231)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:60)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:222)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
    at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
    at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
    at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:601)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
16:03:40,406 ERROR Shell - Failed to locate the winutils binary in the hadoop binary path
java.io.IOException: Could not locate executable null\bin\winutils.exe in the Hadoop binaries.
    at org.apache.hadoop.util.Shell.getQualifiedBinPath(Shell.java:355)
    at org.apache.hadoop.util.Shell.getWinUtilsPath(Shell.java:370)
    at org.apache.hadoop.util.Shell.<clinit>(Shell.java:363)
    at org.apache.hadoop.util.StringUtils.<clinit>(StringUtils.java:79)
    at org.apache.hadoop.conf.Configuration.getStrings(Configuration.java:1767)
    at org.apache.hadoop.io.serializer.SerializationFactory.<init>(SerializationFactory.java:58)
    at org.apache.hadoop.mrunit.internal.io.Serialization.<init>(Serialization.java:39)
    at org.apache.hadoop.mrunit.TestDriver.getSerialization(TestDriver.java:530)
    at org.apache.hadoop.mrunit.TestDriver.copy(TestDriver.java:675)
    at org.apache.hadoop.mrunit.TestDriver.copyPair(TestDriver.java:679)
    at org.apache.hadoop.mrunit.MapReduceDriverBase.addInput(MapReduceDriverBase.java:66)
    at org.apache.hadoop.mrunit.MapReduceDriverBase.withInput(MapReduceDriverBase.java:119)
    at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:62)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:601)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:45)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:231)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:60)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:222)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
    at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
    at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
    at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:601)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
16:03:40,447 DEBUG MapReduceDriver - Starting map phase with mapper: com.hadoop.SMSCDRMapper@1a74d2a
16:03:40,970 DEBUG MapReduceDriver - Starting reduce phase with reducer: com.hadoop.SMSCDRReducer@1b18eb3
16:03:41,010 DEBUG ReducePhaseRunner - Reducing input ((6, 1))
16:03:41,103 ERROR TestDriver - Missing expected output (6, 2) at position 0, got (6, 1).
java.lang.AssertionError: 1 Error(s): (Missing expected output (6, 2) at position 0, got (6, 1).)
    at org.junit.Assert.fail(Assert.java:93)
    at org.apache.hadoop.mrunit.internal.util.Errors.assertNone(Errors.java:73)
    at org.apache.hadoop.mrunit.TestDriver.validate(TestDriver.java:768)
    at org.apache.hadoop.mrunit.TestDriver.runTest(TestDriver.java:641)
    at org.apache.hadoop.mrunit.TestDriver.runTest(TestDriver.java:627)
    at com.hadoop.SMSCDRMapperReducerTest.testMapReduce(SMSCDRMapperReducerTest.java:68)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:45)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:15)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:42)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:20)
    at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:28)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:263)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:68)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:47)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:231)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:60)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:50)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:222)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:300)
    at org.junit.runner.JUnitCore.run(JUnitCore.java:157)
    at com.intellij.junit4.JUnit4IdeaTestRunner.startRunnerWithArgs(JUnit4IdeaTestRunner.java:78)
    at com.intellij.rt.execution.junit.JUnitStarter.prepareStreamsAndStart(JUnitStarter.java:212)
    at com.intellij.rt.execution.junit.JUnitStarter.main(JUnitStarter.java:68)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at com.intellij.rt.execution.application.AppMain.main(AppMain.java:140)
How can I solve this problem?
I had a similar problem, and it got solved by replacing the dependency "hadoop-common" with "hadoop-yarn-common". I hope it helps you too.
I am also sceptical about the dependency "hadoop-core" with version "2.6.0-mr1-cdh5.4.2" mentioned in the question, which I believe is meant for MapReduce 1 (and not MapReduce 2) compatibility.
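Roughly, that swap would look like this in the pom.xml; the hadoop-yarn-common version below is an assumption that simply reuses the CDH release already declared for the question's other Hadoop artifacts:

<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-yarn-common</artifactId>
    <!-- assumed version: same CDH release as the other Hadoop dependencies in the question -->
    <version>2.6.0-cdh5.4.2</version>
</dependency>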
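On the HADOOP_HOME / winutils messages themselves: on Windows they come from Hadoop's Shell class, and judging by the log they did not stop the test from running (the map and reduce phases still executed). If you want the warning gone without installing a full Hadoop distribution, a commonly used workaround is to download winutils.exe and point hadoop.home.dir at its parent folder before the drivers run. A minimal sketch, assuming winutils.exe has been placed under C:\hadoop\bin (the path and method name are purely illustrative):

// Sketch only: add to SMSCDRMapperReducerTest; requires: import org.junit.BeforeClass;
@BeforeClass
public static void setHadoopHomeForWindows() {
    // C:\hadoop is an assumed location whose bin folder contains winutils.exe
    System.setProperty("hadoop.home.dir", "C:\\hadoop");
}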
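Note also that the AssertionError at the end of the log is a separate issue from HADOOP_HOME: testMapReduce feeds only one input record to the driver, so the reducer can only ever emit (6, 1), while the test expects (6, 2). Either expect new IntWritable(1), or give the driver two matching records. A sketch of the latter, using the same MRUnit calls already in the test (the second record is made-up sample data in the same format):

@Test
public void testMapReduce() throws IOException {
    // Two SMS CDR records with status code 6, so an expected count of 2 is achievable
    mapReduceDriver.withInput(new LongWritable(1), new Text(
            "655209;1;796764372490213;804422938115889;6"));
    mapReduceDriver.withInput(new LongWritable(2), new Text(
            "655210;1;796764372490214;804422938115890;6"));
    mapReduceDriver.withOutput(new Text("6"), new IntWritable(2));
    mapReduceDriver.runTest();
}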