刚刚开始编写 Hadoop MR 作业。希望我们能尽快切换到 Spark,但我们目前仍坚持使用 MR。
我想按记录值的散列值对记录进行分组。但我想用完全不相关的东西对它们进行排序——它们值中的时间戳。我对如何最好地做到这一点感到困惑。我看到两个选项:
1) 第一个 MR 作业在其映射器中计算每个值的散列,然后在 reducer 中把散列相同的所有记录归到一起(实际上我已经实现到这一步,目前能满足需要)。然后链接第二个 MR 作业,根据值中的时间戳对上面 reducer 的输出重新排序。这样是否效率低下?
2) 我已经阅读了一些关于如何使用复合键的博客/帖子,所以也许我可以一步完成所有这些?我会创建某种复合键,它既有用于分组的散列,又有用于在映射器中排序的时间戳。但我不清楚这是否可能。如果排序与分组完全无关,它还能正确分组吗?也不确定我需要实现哪些接口(interface)以及我需要创建哪些类或如何配置它。
我不是在谈论次要排序。对于每次 reduce 调用,我不关心 Iterator 中对象的顺序。我关心事物从 reducer 发出的顺序,需要按时间戳进行全局排序。
执行此类操作的推荐方法是什么?
最佳答案
绝对有可能——只要你使用一个同时封装了分组属性和排序属性的复合键来进行 reduce。
假设您需要一个包含 int 类型哈希码和 long 类型时间戳的 key。那么你需要实现一个可写的元组(比如 IntLongPair),并在其中定义你的用例所需的各种比较器和分区器。
所以你将你的工作设置成这样(稍后我将回到可能的 IntLongPair 实现):
job.setPartitionerClass(IntLongPair.IntOnlyPartitioner.class); //partition by your hash code stored in the int part of the pair
job.setGroupingComparatorClass(IntLongPair.IntAscComparator.class); //your hash code grouping - perhaps does not matter ascending or descending
job.setSortComparatorClass(IntLongPair.IntDescLongAscComparator.class); //assuming you need newest items first
(以上即为作业的关键配置。)
这是您可以使用的 IntLongPair:
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Objects;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.Partitioner;
/**
 * Composite MapReduce key: an int part (e.g. a grouping hash code) paired with a
 * long part (e.g. a sorting timestamp).
 *
 * <p>Serialized layout is 4 big-endian int bytes followed by 8 big-endian long bytes;
 * all raw comparators below rely on that layout.
 *
 * <p>The nested {@link RawComparator}s cover the useful orderings of either or both
 * parts, and the nested {@link Partitioner}s route records by a single part, so a job
 * can partition, group and sort by independent pieces of the key (e.g. partition and
 * group by the hash, sort by the timestamp).
 */
public class IntLongPair implements WritableComparable<IntLongPair> {

    // Reusable wrapper objects, following the usual Hadoop Writable pattern.
    private IntWritable intVal = new IntWritable();
    private LongWritable longVal = new LongWritable();

    @Override
    public void write(DataOutput d) throws IOException {
        intVal.write(d);   // 4 bytes, big-endian
        longVal.write(d);  // 8 bytes, big-endian
    }

    @Override
    public void readFields(DataInput di) throws IOException {
        intVal.readFields(di);
        longVal.readFields(di);
    }

    /**
     * Natural order: int part ascending first, long part ascending next.
     *
     * @param o the pair to compare against
     * @return negative, zero, or positive per the {@link Comparable} contract
     */
    @Override
    public int compareTo(IntLongPair o) {
        int diff = intVal.compareTo(o.intVal);
        if (diff != 0) {
            return diff;
        }
        return longVal.compareTo(o.longVal);
    }

    public IntWritable getInt() {
        return intVal;
    }

    /** Replaces the int wrapper itself; the pair then aliases the caller's object. */
    public void setInt(IntWritable intVal) {
        this.intVal = intVal;
    }

    public void setInt(int intVal) {
        this.intVal.set(intVal);
    }

    public LongWritable getLong() {
        return longVal;
    }

    /** Replaces the long wrapper itself; the pair then aliases the caller's object. */
    public void setLong(LongWritable longVal) {
        this.longVal = longVal;
    }

    public void setLong(long longVal) {
        this.longVal.set(longVal);
    }

    @Override
    public boolean equals(Object obj) {
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        final IntLongPair other = (IntLongPair) obj;
        return Objects.equals(this.intVal, other.intVal)
                && Objects.equals(this.longVal, other.longVal);
    }

    @Override
    public int hashCode() {
        int hash = 3;
        hash = 47 * hash + (this.intVal != null ? this.intVal.hashCode() : 0);
        hash = 47 * hash + (this.longVal != null ? this.longVal.hashCode() : 0);
        return hash;
    }

    @Override
    public String toString() {
        return "IntLongPair{" + intVal + ',' + longVal + '}';
    }

    /** Alias for {@link #getInt()}. */
    public IntWritable getFirst() {
        return intVal;
    }

    /** Alias for {@link #getLong()}. */
    public LongWritable getSecond() {
        return longVal;
    }

    /** Copies the value into this pair (unlike {@link #setInt(IntWritable)}, which aliases). */
    public void setFirst(IntWritable value) {
        intVal.set(value.get());
    }

    /** Copies the value into this pair (unlike {@link #setLong(LongWritable)}, which aliases). */
    public void setSecond(LongWritable value) {
        longVal.set(value.get());
    }

    // -------------------------------------------------------------------------
    // Raw comparators. All of them deserialize the parts with
    // WritableComparator.readInt/readLong and compare numerically. A plain
    // memcmp (compareBytes) over two's-complement big-endian bytes would sort
    // negative values AFTER positive ones and disagree with the object-level
    // compare() methods below -- and hash codes are frequently negative.
    // -------------------------------------------------------------------------

    /** Raw comparator implementing the natural order: int ascending, long ascending. */
    public static class Comparator extends WritableComparator {
        public Comparator() {
            super(IntLongPair.class);
        }

        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Integer.compare(readInt(b1, s1), readInt(b2, s2));
            if (comp != 0) {
                return comp;
            }
            return Long.compare(readLong(b1, s1 + 4), readLong(b2, s2 + 4));
        }
    }

    static { // register the natural-order raw comparator for this key type
        WritableComparator.define(IntLongPair.class, new Comparator());
    }

    /** Int part descending first, long part ascending next (e.g. newest hash group first). */
    public static class IntDescLongAscComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
            if (comp != 0) {
                return -comp;
            }
            return Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            int comp = o1.getInt().compareTo(o2.getInt());
            if (comp != 0) {
                return -comp;
            }
            return o1.getLong().compareTo(o2.getLong());
        }
    }

    /** Long part ascending first, int part ascending next. */
    public static class LongAscIntAscComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
            if (comp != 0) {
                return comp;
            }
            return Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            int comp = o1.getLong().compareTo(o2.getLong());
            if (comp != 0) {
                return comp;
            }
            return o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Long part ascending first, int part descending next. */
    public static class LongAscIntDescComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
            if (comp != 0) {
                return comp;
            }
            return -Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            int comp = o1.getLong().compareTo(o2.getLong());
            if (comp != 0) {
                return comp;
            }
            return -o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Long part descending first, int part ascending next. */
    public static class LongDescIntAscComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
            if (comp != 0) {
                return -comp;
            }
            return Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            int comp = o1.getLong().compareTo(o2.getLong());
            if (comp != 0) {
                return -comp;
            }
            return o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Long part descending first, int part descending next. */
    public static class LongDescIntDescComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            int comp = Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
            if (comp != 0) {
                return -comp;
            }
            return -Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            int comp = o1.getLong().compareTo(o2.getLong());
            if (comp != 0) {
                return -comp;
            }
            return -o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Int part only, ascending; suitable as a grouping comparator on the hash. */
    public static class IntAscComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            return o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Int part only, descending. */
    public static class IntDescComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return -Integer.compare(WritableComparator.readInt(b1, s1),
                    WritableComparator.readInt(b2, s2));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            return -o1.getInt().compareTo(o2.getInt());
        }
    }

    /** Long part only, ascending; suitable as a grouping comparator on the timestamp. */
    public static class LongAscComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            return o1.getLong().compareTo(o2.getLong());
        }
    }

    /** Long part only, descending. */
    public static class LongDescComparator implements RawComparator<IntLongPair> {
        @Override
        public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
            return -Long.compare(WritableComparator.readLong(b1, s1 + 4),
                    WritableComparator.readLong(b2, s2 + 4));
        }

        @Override
        public int compare(IntLongPair o1, IntLongPair o2) {
            return -o1.getLong().compareTo(o2.getLong());
        }
    }

    /**
     * Partition based on the long part of the pair.
     */
    public static class LongOnlyPartitioner extends Partitioner<IntLongPair, Writable> {
        @Override
        public int getPartition(IntLongPair key, Writable value, int numPartitions) {
            // Masking the sign bit already yields a non-negative value.
            return (key.getLong().hashCode() & Integer.MAX_VALUE) % numPartitions;
        }
    }

    /**
     * Partition based on the int part of the pair.
     */
    public static class IntOnlyPartitioner extends Partitioner<IntLongPair, Writable> {
        @Override
        public int getPartition(IntLongPair key, Writable value, int numPartitions) {
            // IntWritable.hashCode() is the stored int value; mask the sign bit
            // so negative hash codes still map to a valid partition.
            return (key.getInt().hashCode() & Integer.MAX_VALUE) % numPartitions;
        }
    }
}
关于java - Hadoop Map Reduce - 如何将分组与排序分开?,我们在Stack Overflow上找到一个类似的问题: https://stackoverflow.com/questions/37933152/