| author | illustris <rharikrishnan95@gmail.com> | 2023-09-22 16:34:07 +0530 |
|---|---|---|
| committer | illustris <rharikrishnan95@gmail.com> | 2023-09-22 16:34:07 +0530 |
| commit | 1cd3c804bf4644fc168883c15b60baf596aba4fc (patch) | |
| tree | a967dac6ff29f0f1d8b58d3481c0f18c47e52e38 | |
| parent | 9be640a232166f14be4d38145970ffd63623c4d7 (diff) | |
hadoop, nixos/hadoop: remove untarDir
| -rw-r--r-- | nixos/modules/services/cluster/hadoop/default.nix | 18 |
|---|---|---|
| -rw-r--r-- | nixos/modules/services/cluster/hadoop/yarn.nix | 2 |
| -rw-r--r-- | nixos/tests/hadoop/hadoop.nix | 2 |
| -rw-r--r-- | pkgs/applications/networking/cluster/hadoop/default.nix | 45 |
4 files changed, 33 insertions, 34 deletions
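
The practical effect of this commit is that paths formerly addressed as `${pkgs.hadoop}/lib/${pkgs.hadoop.untarDir}/...` now live directly under the package output root. As a minimal sketch (not part of the commit), assuming the module's usual `services.hadoop` option prefix, a configuration that pinned `log4jProperties` would point at the new layout like this:

```nix
{ pkgs, ... }:

{
  services.hadoop = {
    # Pre-commit layout: "${pkgs.hadoop}/lib/${pkgs.hadoop.untarDir}/etc/hadoop/log4j.properties"
    # Post-commit layout: the unpacked distribution sits at the output root,
    # which is also what the option's new default (see the diff below) uses.
    log4jProperties = "${pkgs.hadoop}/etc/hadoop/log4j.properties";
  };
}
```

The same substitution applies to every path touched in the diff: HADOOP_MAPRED_HOME, the container-executor binary, and the bundled configuration files all drop the `lib/${untarDir}` prefix.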
```diff
diff --git a/nixos/modules/services/cluster/hadoop/default.nix b/nixos/modules/services/cluster/hadoop/default.nix
index 72bf25c2114..ff6b4d5588b 100644
--- a/nixos/modules/services/cluster/hadoop/default.nix
+++ b/nixos/modules/services/cluster/hadoop/default.nix
@@ -67,16 +67,16 @@ with lib;
     mapredSiteDefault = mkOption {
       default = {
         "mapreduce.framework.name" = "yarn";
-        "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
-        "mapreduce.map.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
-        "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=${cfg.package}/lib/${cfg.package.untarDir}";
+        "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=${cfg.package}";
+        "mapreduce.map.env" = "HADOOP_MAPRED_HOME=${cfg.package}";
+        "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=${cfg.package}";
       };
       defaultText = literalExpression ''
         {
           "mapreduce.framework.name" = "yarn";
-          "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}";
-          "mapreduce.map.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}";
-          "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}";
+          "yarn.app.mapreduce.am.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}";
+          "mapreduce.map.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}";
+          "mapreduce.reduce.env" = "HADOOP_MAPRED_HOME=''${config.${opt.package}}";
         }
       '';
       type = types.attrsOf types.anything;
@@ -154,13 +154,13 @@ with lib;
     };

     log4jProperties = mkOption {
-      default = "${cfg.package}/lib/${cfg.package.untarDir}/etc/hadoop/log4j.properties";
+      default = "${cfg.package}/etc/hadoop/log4j.properties";
       defaultText = literalExpression ''
-        "''${config.${opt.package}}/lib/''${config.${opt.package}.untarDir}/etc/hadoop/log4j.properties"
+        "''${config.${opt.package}}/etc/hadoop/log4j.properties"
       '';
       type = types.path;
       example = literalExpression ''
-        "''${pkgs.hadoop}/lib/''${pkgs.hadoop.untarDir}/etc/hadoop/log4j.properties";
+        "''${pkgs.hadoop}/etc/hadoop/log4j.properties";
       '';
       description = lib.mdDoc "log4j.properties file added to HADOOP_CONF_DIR";
     };
diff --git a/nixos/modules/services/cluster/hadoop/yarn.nix b/nixos/modules/services/cluster/hadoop/yarn.nix
index 26077f35fdd..a49aafbd1dc 100644
--- a/nixos/modules/services/cluster/hadoop/yarn.nix
+++ b/nixos/modules/services/cluster/hadoop/yarn.nix
@@ -160,7 +160,7 @@ in
           umount /run/wrappers/yarn-nodemanager/cgroup/cpu || true
           rm -rf /run/wrappers/yarn-nodemanager/ || true
           mkdir -p /run/wrappers/yarn-nodemanager/{bin,etc/hadoop,cgroup/cpu}
-          cp ${cfg.package}/lib/${cfg.package.untarDir}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/
+          cp ${cfg.package}/bin/container-executor /run/wrappers/yarn-nodemanager/bin/
           chgrp hadoop /run/wrappers/yarn-nodemanager/bin/container-executor
           chmod 6050 /run/wrappers/yarn-nodemanager/bin/container-executor
           cp ${hadoopConf}/container-executor.cfg /run/wrappers/yarn-nodemanager/etc/hadoop/
diff --git a/nixos/tests/hadoop/hadoop.nix b/nixos/tests/hadoop/hadoop.nix
index b132f4fa58b..0de2366b186 100644
--- a/nixos/tests/hadoop/hadoop.nix
+++ b/nixos/tests/hadoop/hadoop.nix
@@ -249,7 +249,7 @@ import ../make-test-python.nix ({ package, ... }: {
     assert "standby" in client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState")
     client.succeed("sudo -u yarn yarn rmadmin -getAllServiceState | systemd-cat")

-    assert "Estimated value of Pi is" in client.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~lib/hadoop-*/share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10")
+    assert "Estimated value of Pi is" in client.succeed("HADOOP_USER_NAME=hdfs yarn jar $(readlink $(which yarn) | sed -r 's~bin/yarn~share/hadoop/mapreduce/hadoop-mapreduce-examples-*.jar~g') pi 2 10")
     assert "SUCCEEDED" in client.succeed("yarn application -list -appStates FINISHED")
   '';
 })
diff --git a/pkgs/applications/networking/cluster/hadoop/default.nix b/pkgs/applications/networking/cluster/hadoop/default.nix
index 481217b44a8..b952331a23b 100644
--- a/pkgs/applications/networking/cluster/hadoop/default.nix
+++ b/pkgs/applications/networking/cluster/hadoop/default.nix
@@ -29,11 +29,11 @@ assert elem stdenv.system [ "x86_64-linux" "x86_64-darwin" "aarch64-linux" "aarc

 let
   common = {
-    pname, platformAttrs, untarDir ? "${pname}-${version}", jdk
-    , nativeLibs ? [ ], libPatchesGenerator ? (_: ""), tests
+    pname, platformAttrs, jdk, nativeLibs ? [ ]
+    , libPatchesGenerator ? (_: ""), tests
   }: stdenv.mkDerivation (finalAttrs: {
-    inherit pname jdk untarDir;
+    inherit pname jdk;
     libPatches = libPatchesGenerator finalAttrs;
     version = platformAttrs.${stdenv.system}.version or (throw "Unsupported system: ${stdenv.system}");
     src = fetchurl {
@@ -51,29 +51,29 @@ let
     };

     nativeBuildInputs = [ makeWrapper ]
-      ++ optionals (stdenv.isLinux && (nativeLibs != [ ] || (libPatches finalAttrs) != "")) [ autoPatchelfHook ];
+      ++ optionals (stdenv.isLinux && (nativeLibs != [ ] || (libPatchesGenerator finalAttrs) != "")) [ autoPatchelfHook ];
     buildInputs = [ openssl ] ++ nativeLibs;

     installPhase = ''
-      mkdir -p $out/{lib/${finalAttrs.untarDir}/conf,bin,lib}
-      mv * $out/lib/${finalAttrs.untarDir}
+      mkdir $out
+      mv * $out/
     '' + optionalString stdenv.isLinux ''
       for n in $(find ${finalAttrs.containerExecutor}/bin -type f); do
-        ln -sf "$n" $out/lib/${finalAttrs.untarDir}/bin
+        ln -sf "$n" $out/bin
       done
     '' + ''
-      for n in $(find $out/lib/${finalAttrs.untarDir}/bin -type f ! -name "*.*"); do
-        makeWrapper "$n" "$out/bin/$(basename $n)"\
+      for n in $(find $out/bin -type f ! -name "*.*"); do
+        wrapProgram "$n"\
           --set-default JAVA_HOME ${finalAttrs.jdk.home}\
-          --set-default HADOOP_HOME $out/lib/${finalAttrs.untarDir}\
+          --set-default HADOOP_HOME $out/\
           --run "test -d /etc/hadoop-conf && export HADOOP_CONF_DIR=\''${HADOOP_CONF_DIR-'/etc/hadoop-conf/'}"\
-          --set-default HADOOP_CONF_DIR $out/lib/${finalAttrs.untarDir}/etc/hadoop/\
+          --set-default HADOOP_CONF_DIR $out/etc/hadoop/\
           --prefix PATH : "${makeBinPath [ bash coreutils which]}"\
           --prefix JAVA_LIBRARY_PATH : "${makeLibraryPath finalAttrs.buildInputs}"
       done
     '' + optionalString sparkSupport ''
       # Add the spark shuffle service jar to YARN
-      cp ${spark.src}/yarn/spark-${spark.version}-yarn-shuffle.jar $out/lib/${finalAttrs.untarDir}/share/hadoop/yarn/
+      cp ${spark.src}/yarn/spark-${spark.version}-yarn-shuffle.jar $out/share/hadoop/yarn/
     '' + (finalAttrs.libPatches);

     passthru = { inherit tests; };
@@ -101,24 +101,24 @@ let
   });
   nativeLibs = [ stdenv.cc.cc.lib protobuf zlib snappy libtirpc ];
   libPatchesGenerator = finalAttrs: (''
-    ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/${finalAttrs.untarDir}/lib/native/libsasl2.so.2
-    ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/${finalAttrs.untarDir}/lib/native/
-    ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/
-    ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/
-    ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/${finalAttrs.untarDir}/lib/native/
+    ln -s ${getLib cyrus_sasl}/lib/libsasl2.so $out/lib/native/libsasl2.so.2
+    ln -s ${getLib openssl}/lib/libcrypto.so $out/lib/native/
+    ln -s ${getLib zlib}/lib/libz.so.1 $out/lib/native/
+    ln -s ${getLib zstd}/lib/libzstd.so.1 $out/lib/native/
+    ln -s ${getLib bzip2}/lib/libbz2.so.1 $out/lib/native/
   '' + optionalString stdenv.isLinux ''
     # libjvm.so for Java >=11
-    patchelf --add-rpath ${finalAttrs.jdk.home}/lib/server $out/lib/${finalAttrs.untarDir}/lib/native/libnativetask.so.1.0.0
+    patchelf --add-rpath ${finalAttrs.jdk.home}/lib/server $out/lib/native/libnativetask.so.1.0.0
     # Java 8 has libjvm.so at a different path
-    patchelf --add-rpath ${finalAttrs.jdk.home}/jre/lib/amd64/server $out/lib/${finalAttrs.untarDir}/lib/native/libnativetask.so.1.0.0
+    patchelf --add-rpath ${finalAttrs.jdk.home}/jre/lib/amd64/server $out/lib/native/libnativetask.so.1.0.0
     # NixOS/nixpkgs#193370
     # This workaround is needed to use protobuf 3.19
     # for hadoop 3.3
-    patchelf --replace-needed libprotobuf.so.18 libprotobuf.so $out/lib/${finalAttrs.untarDir}/lib/native/libhdfspp.so
+    patchelf --replace-needed libprotobuf.so.18 libprotobuf.so $out/lib/native/libhdfspp.so
     # for hadoop 3.2
-    patchelf --replace-needed libprotobuf.so.8 libprotobuf.so $out/lib/${finalAttrs.untarDir}/lib/native/libhdfspp.so
+    patchelf --replace-needed libprotobuf.so.8 libprotobuf.so $out/lib/native/libhdfspp.so
     patchelf --replace-needed libcrypto.so.1.1 libcrypto.so \
-      $out/lib/${finalAttrs.untarDir}/lib/native/{libhdfspp.so.0.1.0,examples/{pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}}
+      $out/lib/native/{libhdfspp.so.0.1.0,examples/{pipes-sort,wordcount-nopipe,wordcount-part,wordcount-simple}}
   '');
 in
 {
@@ -138,7 +138,6 @@ in
       };
       aarch64-darwin = aarch64-linux;
     };
-    untarDir = "${pname}-${platformAttrs.${stdenv.system}.version}";
     jdk = jdk11_headless;
     inherit nativeLibs libPatchesGenerator;
     # TODO: Package and add Intel Storage Acceleration Library
```