nixos/hadoop: add HTTPFS

This commit is contained in:
illustris 2021-11-02 18:34:04 +05:30
parent 39c007ce9c
commit 9ca4363191
4 changed files with 78 additions and 10 deletions

View file

@@ -35,6 +35,7 @@ pkgs.runCommand "hadoop-conf" {} ''
cp ${siteXml "hdfs-site.xml" cfg.hdfsSite}/* $out/
cp ${siteXml "mapred-site.xml" cfg.mapredSite}/* $out/
cp ${siteXml "yarn-site.xml" cfg.yarnSite}/* $out/
cp ${siteXml "httpfs-site.xml" cfg.httpfsSite}/* $out/
cp ${cfgFile "container-executor.cfg" cfg.containerExecutorCfg}/* $out/
cp ${pkgs.writeTextDir "hadoop-user-functions.sh" userFunctions}/* $out/
cp ${pkgs.writeTextDir "hadoop-env.sh" hadoopEnv}/* $out/

View file

@@ -70,6 +70,17 @@ with lib;
description = "Hadoop yarn-site.xml definition";
};
# Freeform settings rendered into etc/hadoop/httpfs-site.xml.
httpfsSite = mkOption {
  type = types.attrsOf types.anything;
  default = { };
  example = literalExpression ''
    {
      "hadoop.http.max.threads" = 500;
    }
  '';
  description = "Hadoop httpfs-site.xml definition";
};
log4jProperties = mkOption {
default = "${cfg.package}/lib/${cfg.package.untarDir}/etc/hadoop/log4j.properties";
type = types.path;
@@ -118,7 +129,8 @@ with lib;
config = mkMerge [
(mkIf (builtins.hasAttr "yarn" config.users.users ||
builtins.hasAttr "hdfs" config.users.users) {
builtins.hasAttr "hdfs" config.users.users ||
builtins.hasAttr "httpfs" config.users.users) {
users.groups.hadoop = {
gid = config.ids.gids.hadoop;
};

View file

@@ -86,6 +86,23 @@ in
};
inherit restartIfChanged;
};
# Options for the HDFS HTTPFS gateway, which exposes the webhdfs REST
# API over plain HTTP (default port 14000).
httpfs = {
  enabled = mkOption {
    type = types.bool;
    default = false;
    # Fixed copy-paste from the zkfc option: HTTPFS is an HTTP gateway,
    # not the ZooKeeper failover controller.
    description = ''
      Whether to run the HDFS HTTPFS server
    '';
  };
  tempPath = mkOption {
    type = types.path;
    default = "/tmp/hadoop/httpfs";
    # Exported as HTTPFS_TEMP in the service environment and created
    # by the unit's preStart.
    description = ''
      HTTPFS_TEMP path used by HTTPFS
    '';
  };
  inherit restartIfChanged;
};
};
config = mkMerge [
@@ -166,6 +183,31 @@ in
};
};
})
# Systemd unit for the HDFS HTTPFS gateway (webhdfs REST API over HTTP).
(mkIf cfg.hdfs.httpfs.enabled {
systemd.services.hdfs-httpfs = {
description = "Hadoop httpfs";
wantedBy = [ "multi-user.target" ];
inherit (cfg.hdfs.httpfs) restartIfChanged;
environment = {
# Scratch directory used by the HTTPFS server; created in preStart below.
HTTPFS_TEMP = cfg.hdfs.httpfs.tempPath;
};
preStart = ''
mkdir -p $HTTPFS_TEMP
'';
serviceConfig = {
User = "httpfs";
SyslogIdentifier = "hdfs-httpfs";
ExecStart = "${cfg.package}/bin/hdfs --config ${hadoopConf} httpfs";
Restart = "always";
};
};
# NOTE(review): the HTTPFS port is opened based on the *datanode's*
# openFirewall flag — this looks like a copy-paste; an httpfs-specific
# openFirewall option would be expected here. Confirm intent.
networking.firewall.allowedTCPPorts = (mkIf cfg.hdfs.datanode.openFirewall [
14000 # httpfs.http.port
]);
})
(mkIf (
cfg.hdfs.namenode.enabled || cfg.hdfs.datanode.enabled || cfg.hdfs.journalnode.enabled || cfg.hdfs.zkfc.enabled
) {
@@ -175,6 +217,12 @@ in
uid = config.ids.uids.hdfs;
};
})
# Dedicated system account for the HTTPFS daemon; shares the hadoop group
# with the other Hadoop service users.
(mkIf cfg.hdfs.httpfs.enabled {
  users.users.httpfs = {
    isSystemUser = true;
    group = "hadoop";
    description = "Hadoop HTTPFS user";
  };
})
];
}

View file

@@ -2,19 +2,20 @@
import ../make-test-python.nix ({...}: {
nodes = {
namenode = {pkgs, ...}: {
virtualisation.memorySize = 1024;
services.hadoop = {
package = pkgs.hadoop;
hdfs.namenode = {
hdfs = {
namenode = {
enabled = true;
formatOnInit = true;
};
httpfs.enabled = true;
};
coreSite = {
"fs.defaultFS" = "hdfs://namenode:8020";
};
hdfsSite = {
"dfs.replication" = 1;
"dfs.namenode.rpc-bind-host" = "0.0.0.0";
"dfs.namenode.http-bind-host" = "0.0.0.0";
"hadoop.proxyuser.httpfs.groups" = "*";
"hadoop.proxyuser.httpfs.hosts" = "*";
};
};
};
@@ -24,6 +25,8 @@ import ../make-test-python.nix ({...}: {
hdfs.datanode.enabled = true;
coreSite = {
"fs.defaultFS" = "hdfs://namenode:8020";
"hadoop.proxyuser.httpfs.groups" = "*";
"hadoop.proxyuser.httpfs.hosts" = "*";
};
};
};
@@ -49,5 +52,9 @@ import ../make-test-python.nix ({...}: {
datanode.succeed("sudo -u hdfs hdfs dfsadmin -safemode wait")
datanode.succeed("echo testfilecontents | sudo -u hdfs hdfs dfs -put - /testfile")
assert "testfilecontents" in datanode.succeed("sudo -u hdfs hdfs dfs -cat /testfile")
namenode.wait_for_unit("hdfs-httpfs")
namenode.wait_for_open_port(14000)
assert "testfilecontents" in datanode.succeed("curl -f \"http://namenode:14000/webhdfs/v1/testfile?user.name=hdfs&op=OPEN\" 2>&1")
'';
})