Table of Contents
I. Connecting to and operating HDFS from Python
1 Uploading a file to HDFS
2 Processing data and storing it to HDFS
3 Reading a txt file from HDFS
The hdfs package is installed here with pip, which is straightforward:
pip install hdfs
I. Connecting to and operating HDFS from Python
from hdfs.client import Client

client = Client("http://LocalHost:Port")
client.makedirs('/ml/zmingmingmng')  # create a directory
client.delete('/ml/zmming')  # delete a directory
client.upload("/ml/zmingmingmng/zm.txt", "E:/ttt/testhdfs.txt")  # upload a file
client.download("/ml/zmingmingmng/zm.txt", "E:/ming.txt")  # download a file
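Beyond these four calls, the same Client can also report metadata about a path, which is handy for checking that an upload actually landed. A minimal sketch reusing the paths above; status() and content() return the WebHDFS FileStatus and ContentSummary dictionaries:

from hdfs.client import Client

client = Client("http://LocalHost:Port")
print(client.list('/ml/zmingmingmng'))           # entry names under the directory
print(client.status('/ml/zmingmingmng/zm.txt'))  # file metadata: size, owner, modification time, ...
print(client.content('/ml/zmingmingmng'))        # directory summary: total space used, file count, ...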
# -*- encoding=utf-8 -*-
from hdfs.client import Client

client = Client("http://XXX.XXX.XX.XX:50070")

# create a directory on HDFS
def mkdirs(client, hdfs_path):
    client.makedirs(hdfs_path)

# delete an HDFS file
def delete_hdfs_file(client, hdfs_path):
    client.delete(hdfs_path)

# upload a local file to HDFS
def put_to_hdfs(client, local_path, hdfs_path):
    client.upload(hdfs_path, local_path, cleanup=True)

# download a file from HDFS to the local machine
def get_from_hdfs(client, hdfs_path, local_path):
    client.download(hdfs_path, local_path, overwrite=False)

# append data to an HDFS file
def append_to_hdfs(client, hdfs_path, data):
    client.write(hdfs_path, data, overwrite=False, append=True)

# overwrite an HDFS file with data
def write_to_hdfs(client, hdfs_path, data):
    client.write(hdfs_path, data, overwrite=True, append=False)

# move or rename a file
def move_or_rename(client, hdfs_src_path, hdfs_dst_path):
    client.rename(hdfs_src_path, hdfs_dst_path)

# list the files under a directory (note: this shadows the built-in list)
def list(client, hdfs_path):
    return client.list(hdfs_path, status=False)

if __name__ == '__main__':
    # example call
    kk = list(client, "/user/admin/deploy/user_lable_dimension/")
    for each in kk:
        print(each)
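For completeness, a hedged sketch of how the remaining helpers above might be called; the /tmp/demo paths, local file names and data strings are illustrative examples, not taken from the original:

# assumes the client and the helper functions defined above
mkdirs(client, "/tmp/demo")                                          # create the directory
put_to_hdfs(client, "local_data.csv", "/tmp/demo/local_data.csv")    # upload a local file
write_to_hdfs(client, "/tmp/demo/log.txt", b"first line\n")          # create/overwrite (bytes, since no encoding is passed through)
append_to_hdfs(client, "/tmp/demo/log.txt", b"second line\n")        # append to the existing file
move_or_rename(client, "/tmp/demo/log.txt", "/tmp/demo/log_renamed.txt")
get_from_hdfs(client, "/tmp/demo/log_renamed.txt", "log_local.txt")  # download it back
delete_hdfs_file(client, "/tmp/demo/log_renamed.txt")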
1 Uploading a file to HDFS
from hdfs.client import Client

"""Upload a file to HDFS"""
# TODO upload a file to HDFS
client = Client("http://XXX.XXX.XX.XX:50070")
# create the target directory
hdfs_path = "【directory where the file will be stored, e.g. /a/b/c】"
client.makedirs(hdfs_path)
print("uploading data...")
client.upload(hdfs_path, "intersection.xlsx", overwrite=True)  # file uploaded from the resource center
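After the upload it is worth confirming the file actually landed; a short check reusing the client and hdfs_path above:

print(client.list(hdfs_path))  # the listing should now contain 'intersection.xlsx'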
2 Processing data and storing it to HDFS
import pandas as pd
from hdfs.client import Client

# TODO first build the result list, e.g. i_list
# TODO save the result list as a file on HDFS
print("===============================================")
i_df = pd.DataFrame(i_list)
client = Client("http://XXX.XXX.XX.XX:50070")
fout = "【target path on HDFS, e.g. /a/b/c.csv】"  # path on HDFS
with client.write(fout, encoding='utf-8') as writer:
    i_df.to_csv(writer)
print("saved successfully")
3 Reading a txt file from HDFS
import chardet
import pyhdfs


def GetEncodingScheme(_filename):
    """Detect the text encoding of a local file"""
    with open(_filename, 'rb') as file:
        buf = file.read()
    result = chardet.detect(buf)
    return result['encoding']


def read_hdfs_file(client, filename):
    """Read an HDFS file and return its lines as a list"""
    lines = []
    print("start reading the txt data")
    with client.open(filename) as reader:  # pyhdfs returns a file-like object that can be iterated line by line
        for line in reader:
            lines.append(line.decode("GB2312").strip())
    return lines


def deleteHDFSfile(client, hdfs_path):
    """Delete an HDFS file; a directory must be empty before it can be deleted"""
    client.delete(hdfs_path)


if __name__ == "__main__":
    print(GetEncodingScheme('intersection.xlsx'))  # GB2312
    # connect to HDFS; pyhdfs takes "host:port" pairs without the http:// scheme
    client = pyhdfs.HdfsClient(hosts="xxxxxx:50070,xxxxxx:50070", user_name="xxxxxx")
    # TODO read the HDFS file and return its lines as a list
    hdfs_path = "【file path, e.g. /a/b/c.xlsx】"  # path on HDFS
    print("===============================================")
    print("start reading the txt file on HDFS")
    lines = read_hdfs_file(client, hdfs_path)
    print(lines)
    print("done reading")
    print("===============================================")
    # TODO delete the file under the HDFS directory
    hdfs_path = "【file path】"
    deleteHDFSfile(client, hdfs_path)
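The same file can also be read with the hdfs library used in the earlier sections instead of pyhdfs; its Client.read() is a context manager and accepts a delimiter together with an encoding, so the GB2312 decoding happens in one place. A hedged sketch reusing the article's placeholder host and path:

from hdfs.client import Client

client = Client("http://XXX.XXX.XX.XX:50070")
hdfs_path = "【file path, e.g. /a/b/c.xlsx】"
lines = []
with client.read(hdfs_path, encoding="GB2312", delimiter='\n') as reader:
    for line in reader:  # with delimiter set, the reader yields decoded strings split on '\n'
        lines.append(line.strip())
print(lines)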
This concludes the walkthrough of connecting to HDFS remotely from Python, uploading files, reading file contents, and deleting files.