Maple without technology, 2022-01-26 14:55:50
1
package examples;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi {
// Determine if the path exists
public static boolean test(Configuration conf,String path) throws IOException{
FileSystem fs = FileSystem.get(conf);
return fs.exists(new Path(path));
}
// Copy a local file to the specified HDFS path; if the path already exists, overwrite it
public static void copyFromLocalFile(Configuration conf,String localFilePath,String remoteFilePath) throws IOException{
FileSystem fs=FileSystem.get(conf);
Path localPath = new Path(localFilePath);
Path remotePath = new Path(remoteFilePath);
// fs.copyFromLocalFile: the first parameter indicates whether to delete the source file, the second whether to overwrite
fs.copyFromLocalFile(false,true,localPath,remotePath);
fs.close();
}
// Append the contents of a local file to an HDFS file
public static void appendToFile(Configuration conf,String localFilePath,String remoteFilePath) throws IOException{
FileSystem fs = FileSystem.get(conf);
Path remotePath = new Path(remoteFilePath);
// open the local file as an input stream
FileInputStream in = new FileInputStream(localFilePath);
// open an output stream that appends to the end of the HDFS file
FSDataOutputStream out = fs.append(remotePath);
// copy the local file to the HDFS file in 1 KB chunks
byte[] data = new byte[1024];
int read = -1;
while ((read=in.read(data))>0){
out.write(data,0,read);
}
out.close();
in.close();
fs.close();
}
// The main function
public static void main(String[] args){
Configuration conf = new Configuration();
conf.set("fs.defaultFs","http://localhost:9870");
String localFilePath = "home/ download /text.txt";// The local path
String remoteFilePath = "/user/hadoop/text.txt";//hdfs route
String choice = "append";
String choice2 ="overwrite";
try {
// Check whether the file exists
boolean fileExists = false;
if(HDFSApi.test(conf,remoteFilePath)){
fileExists= true;
System.out.println(remoteFilePath+" Already exists ");
}else {
System.out.println(remoteFilePath+" non-existent .");
}
// Handle the three cases
if(!fileExists){
// the file does not exist, so upload it
HDFSApi.copyFromLocalFile(conf,localFilePath,remoteFilePath);
System.out.println(localFilePath+" Has been uploaded to the "+remoteFilePath);
} else if(choice.equals("overwrite")){
// overwrite was chosen
HDFSApi.copyFromLocalFile(conf,localFilePath,remoteFilePath);
System.out.println(localFilePath+" Covered "+remoteFilePath);
} else if (choice.equals("append")){
// append was chosen
HDFSApi.appendToFile(conf,localFilePath,remoteFilePath);
System.out.println(localFilePath+" has been appended to "+remoteFilePath);
}
}catch(Exception e){
e.printStackTrace();
}
}
}
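A practical aside, not part of the original listing: on a pseudo-distributed cluster with a single DataNode, fs.append() often fails while setting up the write pipeline, because HDFS tries to replace a "bad" DataNode and finds no spare. A minimal sketch of the client-side setting commonly relaxed to work around this; verify the key against your Hadoop version:

// Sketch: client Configuration for append on a single-DataNode cluster.
Configuration conf = new Configuration();
conf.set("fs.defaultFS", "hdfs://localhost:9000");
// With only one DataNode there is no replacement node for the write
// pipeline, so tell the client not to look for one.
conf.set("dfs.client.block.write.replace-datanode-on-failure.policy", "NEVER");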
2
package example;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi {
// Download a file from HDFS to the local filesystem
// If the local path already exists, rename the downloaded file automatically
public static void copyToLocal(Configuration conf,String remoteFilePath,
String localFilePath)throws IOException{
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path(remoteFilePath);
File f=new File(localFilePath);
// If the local file name already exists, pick a new name by appending _0, _1, ...
if(f.exists()){
System.out.println(localFilePath+" already exists.");
Integer i=0;
while (true){
f=new File(localFilePath+"_"+i.toString());
if(!f.exists()){
localFilePath=localFilePath+"_"+i.toString();
break;
}
i++;
}
System.out.println("Renamed to: "+localFilePath);
}
}
// Download the file to the local path
Path localPath=new Path (localFilePath);
fs.copyToLocalFile (remotePath,localPath);
fs.close ();
}
public static void main(String[] args){
Configuration conf=new Configuration();
conf.set("fs.default.name","hdfs://localhost:9000");
String localFilePath="/home/hadoop/text.txt";// The local path
String remoteFilePath="/user/hadoop/text.txt"; //HDFs route
try{
HDFSApi.copyToLocal(conf,remoteFilePath,localFilePath);
System.out.println(" Download complete ");
} catch(Exception e){
e.printStackTrace();
}
}
}
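A side note, my addition rather than the author's: copyToLocalFile() writes a .crc checksum sidecar file next to the local copy. FileSystem also offers a four-argument overload that avoids this by writing through the raw local filesystem; a minimal sketch reusing the variables above:

// delSrc=false keeps the HDFS source; useRawLocalFileSystem=true skips the local .crc file
fs.copyToLocalFile(false, remotePath, localPath, true);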
3
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi {
// Read file contents
public static void cat(Configuration conf,String remoteFilePath)throws
IOException {
FileSystem fs=FileSystem.get(conf);
Path remotePath=new Path(remoteFilePath);
FSDataInputStream in=fs.open(remotePath);
BufferedReader d=new BufferedReader(new InputStreamReader(in));
String line=null;
while((line=d.readLine ())!=null){
System.out.println (line);
}
d.close();
in.close();
fs.close();
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration();
conf.set("fs.defaultFS","hdfs://localhost:9000");
String remoteFilePath="/user/hadoop/text.txt";// HDFS path
try {
System.out.println("Reading file: "+remoteFilePath);
HDFSApi.cat(conf,remoteFilePath);
System.out.println("\nRead complete");
} catch (Exception e){
e.printStackTrace ();
}
}
}
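The same read can be written with try-with-resources, so all three resources are closed even if readLine() throws. A sketch of the rewritten method (my variant, using the same imports as the listing):

public static void cat(Configuration conf, String remoteFilePath) throws IOException {
    try (FileSystem fs = FileSystem.get(conf);
         FSDataInputStream in = fs.open(new Path(remoteFilePath));
         BufferedReader d = new BufferedReader(new InputStreamReader(in))) {
        String line;
        while ((line = d.readLine()) != null) {
            System.out.println(line); // print each line of the HDFS file
        }
    }
}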
4
package example;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
import java.text.SimpleDateFormat;
public class HDFSApi {
// Display information about the specified file
public static void ls(Configuration conf,String remoteFilePath)throws
IOException{
FileSystem fs=FileSystem.get(conf);
Path remotePath=new Path(remoteFilePath);
FileStatus[] fileStatuses=fs.listStatus(remotePath);
for(FileStatus s: fileStatuses){
System.out.println(" route ∶"+s.getPath().toString());
System.out.println(" jurisdiction ∶"+s.getPermission().toString());
System.out.println(" size ∶ "+s.getLen());
/* What's returned is a timestamp , Convert to time date format */
Long timeStamp=s.getModificationTime ();
SimpleDateFormat format=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String date=format.format(timeStamp);
System.out.println(" Time ∶ "+date);
}
fs.close();
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration ();
conf.set("fs.defaultFs","http://localhost:9870");
String remoteFilePath="/user/hadoop/text.txt";//HDFS route
try {
System.out.println(" Read file information ∶"+remoteFilePath);
HDFSApi.ls(conf,remoteFilePath);
System.out.println("\n Read complete ");
} catch(Exception e){
e.printStackTrace ();
}
}
}
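Listing 4 works for a single file because listStatus() on a file path returns a one-element array. When the path is known to be a file, fs.getFileStatus() is the more direct call; a short sketch (my addition):

// Fetch the status of one file without going through listStatus()
FileStatus s = fs.getFileStatus(new Path(remoteFilePath));
System.out.println("Path: " + s.getPath());
System.out.println("Size: " + s.getLen());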
5
package example;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
import java.text.SimpleDateFormat;
public class HDFSApi{
// Display information about all files in the specified directory (recursively)
public static void lsDir(Configuration conf,String remoteDir)throws
IOException{
FileSystem fs=FileSystem.get(conf);
Path dirPath=new Path(remoteDir);
// Recursively get all the files in the directory
RemoteIterator<LocatedFileStatus> remoteIterator=fs.listFiles(dirPath,true);
// Output the information of each file
while(remoteIterator.hasNext()){
FileStatus s=remoteIterator.next();
System.out.println("Path: "+s.getPath().toString());
System.out.println("Permissions: "+s.getPermission().toString());
System.out.println("Size: "+s.getLen());
Long timeStamp=s.getModificationTime();
SimpleDateFormat format=new SimpleDateFormat("yyyy-MM-dd HH:mm:ss");
String date=format.format(timeStamp);
System.out.println("Time: "+date);
System.out.println();
}
fs.close();
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration();
conf.set("fs.defaultFS","hdfs://localhost:9000");
String remoteDir="/user/hadoop";// HDFS path
try {
System.out.println("Reading information for all files in the directory (recursive): "+remoteDir);
HDFSApi.lsDir(conf,remoteDir);
System.out.println(" Read complete ");
} catch(Exception e){
e.printStackTrace ();
}
}
}
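Note that listFiles(dirPath, true) enumerates files only; subdirectories themselves never appear in the iterator. If directories should be printed too, one option is to recurse over listStatus() instead, sketched below (my addition, reusing the listing's imports):

public static void lsDirStatus(FileSystem fs, Path dirPath) throws IOException {
    for (FileStatus s : fs.listStatus(dirPath)) {
        // print files and directories alike at this level
        System.out.println((s.isDirectory() ? "dir:  " : "file: ") + s.getPath());
        if (s.isDirectory()) {
            lsDirStatus(fs, s.getPath()); // descend into the subdirectory
        }
    }
}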
6
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi{
// Determine if the path exists
public static boolean test(Configuration conf,String path)throws IOException {
FileSystem fs=FileSystem.get (conf);
return fs.exists(new Path (path));
}
// Create directory
public static boolean mkdir(Configuration conf,String remoteDir)throws IOException{
FileSystem fs=FileSystem.get(conf);
Path dirPath=new Path (remoteDir);
boolean result=fs.mkdirs(dirPath);
fs.close ();
return result;
}
// create a file
public static void touchz(Configuration conf,String remoteFilePath) throws IOException {
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
FSDataOutputStream outputStream=fs.create(remotePath);
outputStream.close ();
fs.close();
}
// Delete file
public static boolean rm(Configuration conf,String remoteFilePath) throws IOException {
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
boolean result=fs.delete(remotePath,false);
fs.close ();
return result;
}
// The main function
public static void main(String[] args) {
Configuration conf=new Configuration();
conf.set("fs.default.name","hdfs://localhost:9000");
String remoteFilePath="/user/hadoop/input/text.txt";//HDFS route
String remoteDir="/user/hadoop/input"; //HDFS The directory corresponding to the path
try {
// If the path exists, delete it; otherwise create it
if(HDFSApi.test(conf,remoteFilePath)){
HDFSApi.rm(conf,remoteFilePath);// delete
System.out.println("Deleted path: "+remoteFilePath);
}else{
if(!HDFSApi.test(conf,remoteDir)){// if the directory does not exist, create it
HDFSApi.mkdir(conf,remoteDir);
System.out.println("Created directory: "+remoteDir);
}
HDFSApi.touchz(conf,remoteFilePath);
System.out.println(" Create a path ∶"+remoteFilePath);
)
) catch(Exception e) {
e.printStackTrace ();
}
}
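Two behaviors worth noting here (my notes; check them against the FileSystem javadoc for your version): fs.mkdirs() acts like mkdir -p and creates missing parent directories, so the existence check in main() is for logging rather than correctness; and fs.delete(path, false) fails on a non-empty directory (typically with an IOException), which is why the recursive flag exists. For example:

fs.mkdirs(new Path("/user/hadoop/a/b/c")); // creates a, a/b and a/b/c as needed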
7
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi{
// Determine if the path exists
public static boolean test(Configuration conf,String path)throws
IOException {
FileSystem fs=FileSystem.get(conf);
return fs.exists(new Path(path));
}
// Determine whether a directory is empty
// true: empty; false: not empty
public static boolean isDirEmpty(Configuration conf,String remoteDir)
throws IOException {
FileSystem fs=FileSystem.get(conf);
Path dirPath=new Path (remoteDir);
RemoteIterator<LocatedFileStatus>remoteIterator=fs.listFiles
(dirPath,true);
return !remoteIterator.hasNext ();
}
// Create directory
public static boolean mkdir(Configuration conf,String remoteDir)throws
IOException {
FileSystem fs=FileSystem.get(conf);
Path dirPath=new Path (remoteDir);
boolean result=fs.mkdirs (dirPath);
fs.close ();
return result;
}
// Delete directory
public static boolean rmDir(Configuration conf,String remoteDir)throws IOException {
FileSystem fs=FileSystem.get(conf);
Path dirPath=new Path (remoteDir);
// The second parameter indicates whether to delete all files recursively
boolean result=fs.delete(dirPath,true);
fs.close();
return result;
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration();
conf.set("fs.defaultFS","hdfs://localhost:9000");
String remoteDir="/user/hadoop/input";//HDFS Catalog
Boolean forceDelete=false;// Whether to force the deletion of
try {
/* If the directory does not exist, create it; if it exists, delete it */
if(!HDFSApi.test(conf,remoteDir)){
HDFSApi.mkdir(conf,remoteDir);// create the directory
System.out.println("Created directory: "+remoteDir);
}else {
if(HDFSApi.isDirEmpty(conf,remoteDir)||forceDelete){
// the directory is empty, or deletion is forced
HDFSApi.rmDir(conf,remoteDir);
System.out.println("Deleted directory: "+remoteDir);
} else{
// the directory is not empty
System.out.println("Directory is not empty, not deleting: "+remoteDir);
}
}
}catch(Exception e){
e.printStackTrace ();
}
}
}
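One subtlety in isDirEmpty() (my note): listFiles(dirPath, true) returns files only, so a directory containing nothing but empty subdirectories is reported as empty. If subdirectories should count as content, checking listStatus() is one alternative:

// Sketch: treat a directory as empty only if it has no entries at all
public static boolean isDirEmptyStrict(FileSystem fs, Path dirPath) throws IOException {
    return fs.listStatus(dirPath).length == 0; // counts files and subdirectories alike
}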
8
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi {
/** Determine whether the path exists */
public static boolean test(Configuration conf,String path)throws
IOException {
FileSystem fs=FileSystem.get(conf);
return fs.exists (new Path (path));
}
// Append a string to an HDFS file
public static void appendContentToFile(Configuration conf,String
content,String remoteFilePath)throws IOException{
FileSystem fs=FileSystem.get(conf);
Path remotePath=new Path(remoteFilePath);
/* Create an output stream that appends to the end of the file */
FSDataOutputStream out=fs.append(remotePath);
out.write(content.getBytes());
out.close();
fs.close();
}
/** Append the contents of a local file to an HDFS file */
public static void appendToFile(Configuration conf,String localFilePath,
String remoteFilePath)throws IOException{
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
FileInputStream in=new FileInputStream(localFilePath);
// Create an output stream that appends to the end of the file
FSDataOutputStream out=fs.append(remotePath);
/* Read the local file and write its contents to the HDFS file */
byte[] data=new byte[1024];
int read=-1;
while((read=in.read(data))>0) {
out.write(data,0,read);
}
out.close();
in.close();
fs.close();
}
// Move a file from HDFS to the local filesystem
// The source file is deleted after the move
public static void moveToLocalFile(Configuration conf,String
remoteFilePath,String localFilePath)throws IOException {
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
Path localPath=new Path (localFilePath);
fs.moveToLocalFile(remotePath,localPath);
fs.close();
}
// create a file
public static void touchz(Configuration conf,String remoteFilePath)
throws IOException {
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
FSDataOutputStream outputStream=fs.create(remotePath);
outputStream.close ();
fs.close();
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration();
conf.set("fs.default.name","hdfs://localhost:9000");
String remoteFilePath="/user/hadoop/text.txt"; //HDFS file
String content=" New additions \n";
String choice= "after"; // Append to end of file
String choice= "before"; // Append to the beginning of the file
try {
/* Check whether the file exists */
if(!HDFSApi.test(conf,remoteFilePath)){
System.out.println(" file does not exist ∶"+remoteFilePath);
}else {
if (choice.equals("after")){
// append to the end of the file
HDFSApi.appendContentToFile(conf,content,remoteFilePath);
System.out.println("Content appended to the end of "+remoteFilePath);
}else if(choice.equals("before")){
// prepend to the beginning of the file
// there is no API that can do this directly, so first move the file to the local filesystem,
// create a new HDFS file, then write the contents back in order
String localTmpPath="/user/hadoop/tmp.txt";// local temporary path
// move the HDFS file to the local temporary path
HDFSApi.moveToLocalFile(conf,remoteFilePath,localTmpPath);
// create a new, empty HDFS file
HDFSApi.touchz(conf,remoteFilePath);
// write the new content first
HDFSApi.appendContentToFile(conf,content,remoteFilePath);
// then write back the original content
HDFSApi.appendToFile(conf,localTmpPath,remoteFilePath);
System.out.println("Content prepended to the beginning of "+remoteFilePath);
}
}
} catch (Exception e){
e.printStackTrace ();
}
}
}
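A caveat on the prepend branch (my note): moveToLocalFile() deletes the HDFS source, so if the touchz or append steps then fail, the data survives only in the local temporary file. A slightly safer ordering copies instead of moving and cleans up afterwards; a sketch using the same helpers as the listing:

fs.copyToLocalFile(false, remotePath, new Path(localTmpPath)); // copy, don't move
HDFSApi.touchz(conf, remoteFilePath);                          // recreate the file (create() overwrites)
HDFSApi.appendContentToFile(conf, content, remoteFilePath);    // new content first
HDFSApi.appendToFile(conf, localTmpPath, remoteFilePath);      // then the original content
new File(localTmpPath).delete();                               // remove the local temporary copy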
9
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi{
// Delete file
public static boolean rm(Configuration conf,String remoteFilePath)
throws IOException {
FileSystem fs=FileSystem.get(conf);
Path remotePath=new Path (remoteFilePath);
boolean result=fs.delete (remotePath,false);
fs.close ();
return result;
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration ();
conf.set("fs.defaultFs","hdfs://localhost:9000");
String remoteFilePath="/user/hadoop/text.txt";//HDFS file
try {
if(HDFSApi.rm(conf,remoteFilePath)){
System.out.println("File deleted: "+remoteFilePath);
}else {
System.out.println("Operation failed (file does not exist or deletion failed)");
}
} catch (Exception e){
e.printStackTrace ();
}
}
}
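Unlike the hdfs dfs -rm shell command, FileSystem.delete() bypasses the HDFS trash entirely. If trash semantics are wanted from Java, org.apache.hadoop.fs.Trash offers a static helper; a hedged sketch (trash must be enabled on the cluster, i.e. fs.trash.interval > 0):

// Moves the path into the user's .Trash directory instead of deleting it
boolean moved = Trash.moveToAppropriateTrash(fs, remotePath, conf);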
10
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import java.io.*;
public class HDFSApi{
// Move a file within HDFS
public static boolean mv(Configuration conf,String remoteFilePath,
String remoteToFilePath)throws IOException {
FileSystem fs=FileSystem.get (conf);
Path srcPath=new Path (remoteFilePath);
Path dstPath=new Path (remoteToFilePath);
boolean result=fs.rename(srcPath,dstPath);
fs.close();
return result;
}
// The main function
public static void main(String[] args){
Configuration conf=new Configuration ();
conf.set("fs.defaultFs","hdfs://localhost:9000");
String remoteFilePath="hdfs:///user/hadoop/text.txt";
// Source file HDFS route
String remoteToFilePath= "hdfs:///user/hadoop/new.txt";
// Purpose HDFS route
try {
if(HDFSApi.mv(conf,remoteFilePath,remoteToFilePath)){
System.out.println("Moved file "+remoteFilePath+" to "+
remoteToFilePath);
}else{
System.out.println("Operation failed (source file does not exist or the move failed)");
}
} catch(Exception e){
e.printStackTrace ();
}
}
}
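rename() reports most failures through its return value rather than an exception, for instance when the destination already exists or its parent directory is missing, which is why main() checks the boolean. A small defensive sketch (my addition):

// Check the common failure condition before renaming
if (fs.exists(dstPath)) {
    System.out.println("Destination already exists: " + dstPath);
} else if (!fs.rename(srcPath, dstPath)) {
    System.out.println("Rename failed (does the source path exist?)");
}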
11
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import java.io.*;
public class MyFSDataInputStream extends FSDataInputStream{
public MyFSDataInputStream(InputStream in){
super(in);
}
// Read line by line
// Read one character at a time; stop at "\n" and return one line
public static String readline(BufferedReader br)throws IOException{
char[] data=new char[1024];
int read=-1;
int off=0;
// br keeps its position between calls, so each call continues where the previous one stopped;
// off therefore restarts at 0 on every call
while((read=br.read(data,off,1))!=-1){
if(data[off]=='\n'){
off+=1;
break;
}
off+=1;
}
if(off>0){
// return only the characters actually read, not the whole buffer
return String.valueOf(data,0,off);
}else {
return null;
}
}
// Read file contents
public static void cat(Configuration conf,String remoteFilePath)throws
IOException {
FileSystem fs=FileSystem.get (conf);
Path remotePath=new Path (remoteFilePath);
FSDataInputStream in=fs.open(remotePath);
BufferedReader br=new BufferedReader(new InputStreamReader(in));
String line= null;
while((line=MyFSDataInputStream.readline(br))!=null){
System.out.println (line);
}
br.close();
in.close();
fs.close ();
}
// The main function
public static void main(String [] args){
Configuration conf=new Configuration();
conf.set("fs.defaultFs","hdfs://localhost:9000");
String remoteFilePath="/user/hadoop/text.txt";//HDFS route
try {
MyFSDataInputStream.cat (conf,remoteFilePath);
} catch(Exception e){
e.printStackTrace ();
}
}
}
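The fixed char[1024] buffer above also limits line length: once off reaches 1024, br.read(data, off, 1) throws an IndexOutOfBoundsException. A sketch of the same loop built on a StringBuilder, which grows as needed (my variant; like the original it keeps the trailing "\n"):

public static String readline(BufferedReader br) throws IOException {
    StringBuilder sb = new StringBuilder();
    int c;
    while ((c = br.read()) != -1) {
        sb.append((char) c);
        if (c == '\n') break; // the line is complete
    }
    return sb.length() > 0 ? sb.toString() : null;
}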
12
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.IOUtils;
import java.io.*;
import java.net.URL;
public class HDFSApi {
static{
URL.setURLStreamHandlerFactory(new FsUrlStreamHandlerFactory());
}
// The main function
public static void main(String[] args)throws Exception{
String remoteFilePath="hdfs:///user/hadoop/text.txt";//HDFS file
InputStream in=null;
try{
// Open the data stream through a URL object and read from it
in=new URL(remoteFilePath).openStream();
IOUtils.copyBytes(in,System.out,4096,false);
}finally{
IOUtils.closeStream(in);
}
}
}
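One caveat (my note): URL.setURLStreamHandlerFactory() may be called at most once per JVM, so this pattern conflicts with any other code that installs its own factory. The FileSystem API used in the earlier listings reads the same file without touching JVM-wide state; a sketch (add java.net.URI and the Configuration import):

FileSystem fs = FileSystem.get(URI.create(remoteFilePath), new Configuration());
IOUtils.copyBytes(fs.open(new Path(remoteFilePath)), System.out, 4096, false);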
copyright: author [Maple without technology]. Please include the original link when reprinting: https://en.javamana.com/2022/01/202201261455481046.html