Uploading a local file to a Linux server from Java and putting it into Hadoop (the version that requires paths; a no-path version will be posted separately)
Create a new utility class:
package com.lrhealth.mappingintegration.utils;

import com.jcraft.jsch.Channel;
import com.jcraft.jsch.ChannelExec;
import com.jcraft.jsch.ChannelSftp;
import com.jcraft.jsch.JSch;
import com.jcraft.jsch.Session;
import com.jcraft.jsch.UserInfo;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Component;

/**
 * ClassName: UDFUtils
 * Description: uploads a local file to the remote Linux server over SFTP, then puts it into HDFS
 *
 * @author pangyq
 * date: 2021/9/17 14:12
 */
@Component
public class UDFUtils {

    public static String host;
    public static int port;
    public static String user;
    public static String password;

    @Value("${server.host}")
    public void setHost(String serverHost) {
        host = serverHost;
    }

    @Value("${server.user}")
    public void setUser(String serverUser) {
        user = serverUser;
    }

    @Value("${server.password}")
    public void setPassword(String serverPassword) {
        password = serverPassword;
    }

    @Value("${server.upport}")
    public void setPort(Integer serverPort) {
        port = serverPort;
    }

    private Session session;

    /**
     * Create the SSH session on first use
     */
    private void initialSession() throws Exception {
        if (session == null) {
            JSch jsch = new JSch();
            session = jsch.getSession(user, host, port);
            session.setUserInfo(new UserInfo() {
                @Override
                public String getPassphrase() {
                    return null;
                }

                @Override
                public String getPassword() {
                    return null;
                }

                @Override
                public boolean promptPassword(String arg0) {
                    return false;
                }

                @Override
                public boolean promptPassphrase(String arg0) {
                    return false;
                }

                @Override
                public boolean promptYesNo(String arg0) {
                    // accept the host key without prompting
                    return true;
                }

                @Override
                public void showMessage(String arg0) {
                }
            });
            session.setPassword(password);
            session.connect();
        }
    }

    /**
     * Upload a file and put it into HDFS.
     *
     * @param localPath  local directory; if empty, the current directory is used
     * @param localFile  local file name; if empty or "*", every file in the directory is uploaded
     * @param remotePath remote directory; if empty, the current directory is used; created automatically if it does not exist on the server
     */
    public void putFile(String localPath, String localFile, String remotePath) throws Exception {
        this.initialSession();
        Channel channelSftp = session.openChannel("sftp");
        channelSftp.connect();
        ChannelSftp c = (ChannelSftp) channelSftp;

        String remoteFile;
        if (remotePath != null && remotePath.trim().length() > 0) {
            try {
                c.mkdir(remotePath);
            } catch (Exception e) {
                // the directory already exists
            }
            remoteFile = remotePath + "/.";
        } else {
            remoteFile = ".";
        }

        String file;
        if (localFile == null || localFile.trim().length() == 0) {
            file = "*";
        } else {
            file = localFile;
        }
        if (localPath != null && localPath.trim().length() > 0) {
            if (localPath.endsWith("/")) {
                file = localPath + file;
            } else {
                file = localPath + "/" + file;
            }
        }
        c.put(file, remoteFile);

        // put the jar into the HDFS directory (remotePath is expected to end with "/");
        // only the last setCommand on an exec channel takes effect, so the switch to the
        // hdfs user and the put are issued as a single command
        ChannelExec openChannel = (ChannelExec) session.openChannel("exec");
        openChannel.setCommand("su - hdfs -c \"hdfs dfs -put " + remotePath + localFile + " /udfjar\"");
        openChannel.connect();
        // wait for the command to finish before reading its exit status (0 means the put succeeded)
        while (!openChannel.isClosed()) {
            Thread.sleep(100);
        }
        int exitStatus = openChannel.getExitStatus();
        openChannel.disconnect();
        channelSftp.disconnect();
    }
}
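The service layer that sits between the controller (shown further down) and this utility is not included in the post. A minimal sketch of what tmpDbInfoService.uploadUDF could look like, assuming the DTO exposes getters for the three path fields; the class and getter names here are guesses, not the author's code:

import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

@Service
public class TmpDbInfoService {

    @Autowired
    private UDFUtils udfUtils;

    // hands the three path fields from the request body to UDFUtils.putFile
    public boolean uploadUDF(TmpDbUploadJarDTO dto) {
        try {
            udfUtils.putFile(dto.getLocalPath(), dto.getLocalFile(), dto.getRemotePath());
            return true;
        } catch (Exception e) {
            // report failure to the controller instead of propagating the exception
            return false;
        }
    }
}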
Configuration file:
# server information
server.host=<server IP address>
server.upport=<port>
server.user=<user>
server.password=<password>
Controller layer:
Write it to suit your own needs; I've hollowed out the contents, heh!
@PostMapping("/createFuction")
public ResponseResult createFuction(
        // write the parameters as you need; TmpDbUploadJarDTO is our own entity class
        @RequestBody TmpDbUploadJarDTO tmpDbUploadJarDTO
) throws IOException {
    try {
        boolean uploadUDF = tmpDbInfoService.uploadUDF(tmpDbUploadJarDTO);
        if (uploadUDF) {
            // tmpDbInfoService.addJarToHive(tmpDbUploadJarDTO);
            // tmpDbInfoService.makeFunction(tmpDbUploadJarDTO);
        }
    } catch (Exception e) {
        return ResponseResult.createErrorResponse("创建失败");
    }
    return ResponseResult.createSuccessResponse("创建成功");
}
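The commented-out addJarToHive / makeFunction calls are not included in the post. For reference, registering the uploaded jar as a permanent Hive function over Hive JDBC might look roughly like the sketch below; the class name, JDBC URL, and credentials are placeholders, and the Hive JDBC driver must be on the classpath — this is not the author's implementation:

import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.Statement;

public class HiveFunctionHelper {

    // creates a permanent Hive UDF from a jar that was already put into /udfjar on HDFS
    public static void createFunction(String jarName, String functionName, String classPath) throws Exception {
        String url = "jdbc:hive2://hiveserver2-host:10000/default"; // placeholder HiveServer2 address
        try (Connection conn = DriverManager.getConnection(url, "hive", "");
             Statement stmt = conn.createStatement()) {
            stmt.execute("CREATE FUNCTION " + functionName
                    + " AS '" + classPath + "'"
                    + " USING JAR 'hdfs:///udfjar/" + jarName + "'");
        }
    }
}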
Postman test:
{ // "localPath":"E:\\UDFutil", // "localFile":"UDF-tolower0922test.jar", // "remotePath":"/root/zgw/hive/udf/", "functionName":"toLower", "classPath":"" }
OK, take it and use it. This version needs the local file path, the local file name, and the remote server path; I'll post the version I'm actually using in a bit!
Source: https://www.cnblogs.com/xxcbz/p/15325503.html