Building hadoop-2.2.0's fuse_dfs, libhadoop.so and libhdfs.so on ppc64le

Step 1: Download the Hadoop source code

Download from: https://archive.apache.org/dist/hadoop/core/

Step 2: Prepare the build environment following BUILDING.txt in the Hadoop source tree

Requirements:

  • Unix System
  • JDK 1.6+
  • Maven 3.0 or later
  • Findbugs 1.3.9 (if running findbugs)
  • ProtocolBuffer 2.5.0
  • CMake 2.6 or newer (if compiling native code)
  • Internet connection for first build (to fetch all Maven and Hadoop dependencies)

Step 3: Build ProtocolBuffer 2.5.0 (other dependencies such as Maven are assumed to be set up separately)

  1. Configure the build: ./configure --build=ppc64le

Build error without the --build flag: configure: error: cannot guess build type; you must specify one (the config.guess shipped with protobuf 2.5.0 cannot detect ppc64le, so the build triple must be given explicitly).

  2. Download atomicops_internals_ppc_gcc.h into /root/lsc/protobuf-2.5.0/src/google/protobuf/stubs

    Download from: https://github.com/protocolbuffers/protobuf/blob/513a8a69df75d2ea116e3a109143cb487e756f9e/src/google/protobuf/stubs/atomicops_internals_ppc_gcc.h (the file is reproduced below for reference)

// Protocol Buffers - Google's data interchange format
// Copyright 2015 Google Inc.  All rights reserved.
// https://developers.google.com/protocol-buffers/
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
//     * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//     * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
//     * Neither the name of Google Inc. nor the names of its
// contributors may be used to endorse or promote products derived from
// this software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

// Author: ogabbay@advaoptical.com (Oded Gabbay)
// Cleaned up by: bsilver16384@gmail.com (Brian Silverman)
//
// This file is an internal atomic implementation, use atomicops.h instead.

#ifndef GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PPC_GCC_H_
#define GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PPC_GCC_H_

#define ATOMICOPS_COMPILER_BARRIER() __asm__ __volatile__("" : : : "memory")

namespace google {
namespace protobuf {
namespace internal {

inline Atomic32 NoBarrier_CompareAndSwap(volatile Atomic32 *ptr,
                                         Atomic32 old_value,
                                         Atomic32 new_value) {
  Atomic32 prev;

  __asm__ __volatile__(
      "0:                                  \n\t"
      "lwarx %[prev],0,%[ptr]              \n\t"
      "cmpw 0,%[prev],%[old_value]         \n\t"
      "bne- 1f                             \n\t"
      "stwcx. %[new_value],0,%[ptr]        \n\t"
      "bne- 0b                             \n\t"
      "1:                                  \n\t"
      : [prev] "=&r"(prev), "+m"(*ptr)
      : [ptr] "r"(ptr), [old_value] "r"(old_value), [new_value] "r"(new_value)
      : "cc", "memory");

  return prev;
}

inline Atomic32 NoBarrier_AtomicExchange(volatile Atomic32 *ptr,
                                         Atomic32 new_value) {
  Atomic32 old;

  __asm__ __volatile__(
      "0:                                  \n\t"
      "lwarx %[old],0,%[ptr]               \n\t"
      "stwcx. %[new_value],0,%[ptr]        \n\t"
      "bne- 0b                             \n\t"
      : [old] "=&r"(old), "+m"(*ptr)
      : [ptr] "r"(ptr), [new_value] "r"(new_value)
      : "cc", "memory");

  return old;
}

inline Atomic32 NoBarrier_AtomicIncrement(volatile Atomic32 *ptr,
                                          Atomic32 increment) {
  Atomic32 temp;

  __asm__ __volatile__(
      "0:                                  \n\t"
      "lwarx %[temp],0,%[ptr]              \n\t"
      "add %[temp],%[increment],%[temp]    \n\t"
      "stwcx. %[temp],0,%[ptr]             \n\t"
      "bne- 0b                             \n\t"
      : [temp] "=&r"(temp)
      : [increment] "r"(increment), [ptr] "r"(ptr)
      : "cc", "memory");

  return temp;
}

inline Atomic32 Barrier_AtomicIncrement(volatile Atomic32 *ptr,
                                        Atomic32 increment) {
  MemoryBarrier();
  Atomic32 res = NoBarrier_AtomicIncrement(ptr, increment);
  MemoryBarrier();
  return res;
}

inline Atomic32 Acquire_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  Atomic32 res = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
  MemoryBarrier();
  return res;
}

inline Atomic32 Release_CompareAndSwap(volatile Atomic32 *ptr,
                                       Atomic32 old_value, Atomic32 new_value) {
  MemoryBarrier();
  Atomic32 res = NoBarrier_CompareAndSwap(ptr, old_value, new_value);
  return res;
}

inline void NoBarrier_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
}

inline void MemoryBarrier() { __asm__ __volatile__("sync" : : : "memory"); }

inline void Acquire_Store(volatile Atomic32 *ptr, Atomic32 value) {
  *ptr = value;
  MemoryBarrier();
}

inline void Release_Store(volatile Atomic32 *ptr, Atomic32 value) {
  MemoryBarrier();
  *ptr = value;
}

inline Atomic32 NoBarrier_Load(volatile const Atomic32 *ptr) { return *ptr; }

inline Atomic32 Acquire_Load(volatile const Atomic32 *ptr) {
  Atomic32 value = *ptr;
  MemoryBarrier();
  return value;
}

inline Atomic32 Release_Load(volatile const Atomic32 *ptr) {
  MemoryBarrier();
  return *ptr;
}

}  // namespace internal
}  // namespace protobuf
}  // namespace google

#undef ATOMICOPS_COMPILER_BARRIER

#endif  // GOOGLE_PROTOBUF_ATOMICOPS_INTERNALS_PPC_GCC_H_

  3. Modify atomicops.h. At the end of the architecture dispatch chain, the original lines read:

    187 #else
    188 GOOGLE_PROTOBUF_ATOMICOPS_ERROR

    After the change, lines 187-191 read:

    187 #elif defined(GOOGLE_PROTOBUF_ARCH_PPC)
    188 #include <google/protobuf/stubs/atomicops_internals_ppc_gcc.h>
    189 #else
    190 #include <google/protobuf/stubs/atomicops_internals_ppc_gcc.h>
    191 //#GOOGLE_PROTOBUF_ATOMICOPS_ERROR

    Note that both the new #elif branch and the #else fallback include the PPC header: because step 4 below only comments out the #error in platform_macros.h rather than defining GOOGLE_PROTOBUF_ARCH_PPC, it is the #else fallback that actually pulls the header in.
  4. Modify platform_macros.h. Comment out the unsupported-architecture error at line 61.

    Before:
    61  #error Host architecture was not detected as supported by protobuf

    After:
    61  //#error Host architecture was not detected as supported by protobuf
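
A cleaner alternative, roughly what the protobuf commit linked under the references below does, is to teach platform_macros.h about POWER instead of silencing the error, so that the GOOGLE_PROTOBUF_ARCH_PPC branch added to atomicops.h in step 3 actually matches. The following is only a sketch under that assumption; the macro spelling follows the atomicops.h edit above rather than being verified against the commit, and __PPC64__ is predefined by GCC on ppc64le:

    #if defined(__PPC64__)                 /* ppc64 / ppc64le */
    #define GOOGLE_PROTOBUF_ARCH_PPC 1     /* matches the check added to atomicops.h */
    #define GOOGLE_PROTOBUF_ARCH_64_BIT 1  /* 64-bit marker used for other 64-bit targets */
    #else
    #error Host architecture was not detected as supported by protobuf
    #endif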

References:
https://github.com/bsilver8192/protobuf/commit/ff156e4863c36c08814bfda08e60ff58f6a6cefc
https://blog.csdn.net/FrankieCheng/article/details/40742187

Step 4: Build HDFS from hadoop-2.2.0 (including fuse)

Build command: mvn package -Pnative -DskipTests -Drequire.fuse=true

Build error: Error running javah command: Error executing command line. Exit code: 127

Fix: set JAVA_HOME to point at the JDK:

export JAVA_HOME=<path to your JDK>

Build error: Package 'fuse', required by 'virtual:world', not found

Fix: yum install fuse-devel

Build error: fatal error: rpc/types.h: No such file or directory

Fix: yum -y install libtirpc-devel

If the header is still not found after installing, follow this link to add the libtirpc headers to the compiler's include path: https://blog.csdn.net/dxgzg/article/details/120913307

Build error: HadoopPipes.cc:423:16: error: aggregate 'HMAC_CTX ctx' has incomplete type and cannot be defined

Since this build does not include the Pipes component, the error is left unhandled.
For a fix, see: https://blog.csdn.net/SundayO/article/details/102932993
For a fix, see: https://developer.akamai.com/blog/2017/02/14/upgrade-openssl-110

Build error: undefined reference to 'xdrmem_create' and 'xdr_float'

Since this build does not include the Pipes component, this error is likewise left unhandled.
For a fix, see: https://stackoverflow.com/questions/62273993/hmac-ctx-has-incomplete-type

Extra: changing the replication factor used by fuse_dfs

From a first look at the source, setting the LIBHDFS_OPTS environment variable should allow dfs.replication to be set for fuse_dfs, but in testing it had no effect and files were still written with the default replication factor of 3. For now, the workaround is to patch the source directly. Modify the following files:

hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/fuse_impls_truncate.c  
hadoop-hdfs-project/hadoop-hdfs/src/main/native/fuse-dfs/fuse_impls_open.c

In the call hdfsOpenFile(fs, path, flags, 0, 0, 0), change the second-to-last argument to the desired replication factor and rebuild, for example (a sketch of the call follows these examples):

Replication factor 1: hdfsOpenFile(fs, path, flags, 0, 1, 0)
Replication factor 2: hdfsOpenFile(fs, path, flags, 0, 2, 0)
Replication factor 3: hdfsOpenFile(fs, path, flags, 0, 3, 0)
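
For context, the call being changed is libhdfs's hdfsOpenFile() (declared in hdfs.h); its fifth argument is the replication factor, and 0 means "use the configured default". A minimal sketch of this kind of change is shown below; the helper name and surrounding code are illustrative, not copied from fuse_impls_open.c:

    #include "hdfs.h"  /* libhdfs C API:
                          hdfsFile hdfsOpenFile(hdfsFS fs, const char *path, int flags,
                                                int bufferSize, short replication,
                                                tSize blocksize);                      */

    /* Illustrative only: open an HDFS file with replication hard-coded to 1
     * instead of 0 (0 = use the cluster/client default of dfs.replication). */
    static hdfsFile open_with_replication_1(hdfsFS fs, const char *path, int flags)
    {
        return hdfsOpenFile(fs, path, flags, 0, /* replication = */ 1, 0);
    }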

Build outputs:

fuse_dfs: hadoop-hdfs-project/hadoop-hdfs/target/native/main/native/fuse-dfs/fuse_dfs
libhadoop.so: hadoop-2.2.0-src/hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/libhadoop.so.1.0.0
libhdfs.so: hadoop-2.2.0-src/hadoop-hdfs-project/hadoop-hdfs/target/native/target/usr/local/lib/libhdfs.so.0.0.0
