hadoop >> Simple HDFS operations from Django
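The snippets below come from a small Django project that talks to HDFS through the `hdfs` Python package (HdfsCLI; API docs at https://hdfscli.readthedocs.io/en/latest/api.html). The `Client` in the code connects to the NameNode's WebHDFS endpoint, which listens on port 50070 in Hadoop 2.x.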
from django.shortcuts import render
# Create your views here.
from django.views import View
from hdfs.client import Client
import os

# The Python HDFS API is documented at:
# https://hdfscli.readthedocs.io/en/latest/api.html


# # Read an HDFS file, returning its lines as a list
# def read_hdfs_file(client, filename):
#     lines = []
#     with client.read(filename, encoding='utf-8', delimiter='\n') as reader:
#         for line in reader:
#             lines.append(line.strip())
#     return lines
#
#
# # Create a directory
# def mkdirs(client, hdfs_path):
#     client.makedirs(hdfs_path)
#
#
# # Delete an HDFS file
# def delete_hdfs_file(client, hdfs_path):
#     client.delete(hdfs_path)


# Upload a local file to HDFS
def put_to_hdfs(client, local_path, hdfs_path):
    client.upload(hdfs_path, local_path, cleanup=True)


# # Download a file from HDFS to the local filesystem
# def get_from_hdfs(client, hdfs_path, local_path):
#     client.download(hdfs_path, local_path, overwrite=False)
#
#
# # Append data to an HDFS file
# def append_to_hdfs(client, hdfs_path, data):
#     client.write(hdfs_path, data, overwrite=False, append=True)


# Overwrite an HDFS file with data
def write_to_hdfs(client, hdfs_path, data):
    client.write(hdfs_path, data, overwrite=True, append=False)


# Move or rename a file
def move_or_rename(client, hdfs_src_path, hdfs_dst_path):
    client.rename(hdfs_src_path, hdfs_dst_path)


# # List the files under a directory
# def list(client, hdfs_path):
#     return client.list(hdfs_path, status=False)


# # root: the root directory of the connection
# client = Client("http://192.168.88.129:50070",
#                 root="/", timeout=5 * 1000, session=False)
# # put_to_hdfs(client, 'a.csv', '/user/root/a.csv')
# # append_to_hdfs(client, '/b.txt', '111111111111111' + '\n')
# # write_to_hdfs(client, '/b.txt', '222222222222' + '\n')
# # move_or_rename(client, '/b.txt', '/user/b.txt')
# mkdirs(client, '/input1/python1')
# print(list(client, '/input'))
# read_hdfs_file(client, '/')
# client.list("/")


# Create a directory
def mkdirs(client, hdfs_path):
    client.makedirs(hdfs_path)


# Return True if the given HDFS path is a file
def get_IS_File(client, hdfs_path):
    return client.status(hdfs_path)['type'] == 'FILE'


from hadoop_hdfs.settings import UPLOAD_ROOT

client = Client("http://192.168.88.129:50070",
                root="/", timeout=5 * 1000, session=False)


class Index(View):
    def get(self, request):
        return render(request, "index.html")

    def post(self, request):
        # Save the uploaded file under UPLOAD_ROOT
        def uploadfile(img):
            f = open(os.path.join(UPLOAD_ROOT, img.name), 'wb')
            for chunk in img.chunks():
                f.write(chunk)
            f.close()

        # Read an HDFS file, returning its lines as a list
        def read_hdfs_file(client, filename):
            lines = []
            with client.read(filename, encoding='utf-8', delimiter='\n') as reader:
                for line in reader:
                    lines.append(line.strip())
            return lines

        file = request.FILES.get("file")
        uploadfile(file)
        all_file = client.list("/")
        for i in all_file:
            file_true = get_IS_File(client, "/{}".format(i)), i
            print(file_true)
            # if file_true == "True":
            #     return render(request, "index.html", locals())
            # else:
            #     pass
        # get_IS_File(all_file, "/")
        # lujin = "/upload/" + file.name
        mkdirs(client, file.name)
        # move_or_rename(client, file.name, '/c.txt')
        # put_to_hdfs(client, "C:/Users/Lenovo/Desktop/hadoop_hdfs/upload/" + file.name, '/')
        # write_to_hdfs(client, "upload" + file.name, '222222222222' + '\n')
        return render(request, "index.html", locals())
views.py
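The post never shows the project's urls.py or the UPLOAD_ROOT setting that views.py imports. Here is a minimal sketch of how they might look, assuming a Django 1.x-style URL configuration; the route and the `web` app name are guesses based on the imports above.

# urls.py -- hypothetical wiring for the Index view (not shown in the original post)
from django.conf.urls import url
from web import views

urlpatterns = [
    # GET renders index.html, POST saves the upload and lists HDFS "/"
    url(r'^$', views.Index.as_view()),
]

# settings.py -- a plausible definition of the UPLOAD_ROOT the views import
import os
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
UPLOAD_ROOT = os.path.join(BASE_DIR, 'upload')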
The second views file handles image uploads, plus a commented-out experiment that reads a MySQL table with pandas and plots a pie chart:

# Import the required modules
import pandas as pd
from sqlalchemy import create_engine
from matplotlib import pylab as plt
from django.views import View
from django.shortcuts import render
import os
from hadoop_hdfs.settings import UPLOAD_ROOT
from web.models import *


# Save the uploaded file under UPLOAD_ROOT
def uploadfile(img):
    f = open(os.path.join(UPLOAD_ROOT, img.name), 'wb')
    for chunk in img.chunks():
        f.write(chunk)
    f.close()


class Upload(View):
    def get(self, request):
        # show = Image.objects.all()
        return render(request, "haha.html", locals())

    def post(self, request):
        imgs = request.FILES.get('img')
        uploadfile(imgs)
        all = Image(img="/upload/" + imgs.name)
        all.save()
        return render(request, "haha.html")

    # def post(self, request):
    #     # Initialize the database connection with the pymysql driver
    #     # MySQL user: root, password: 147369, port: 3306, database: mydb
    #     engine = create_engine('mysql+pymysql://root:@127.0.0.1/dj')
    #
    #     # Query all rows of the haha table
    #     sql = '''select * from haha;'''
    #
    #     # read_sql_query takes two arguments: the SQL statement and the connection
    #     df = pd.read_sql_query(sql, engine)
    #
    #     # Print the query result
    #     print(df)
    #
    #     # Build a new pandas DataFrame with only the id and num columns
    #     # df = pd.DataFrame({'id': [1, 2], 'name': ['111', '222'], 'image_url': ['http://111', 'http://222']})
    #     # df = pd.DataFrame({"id": df['sentiment'], "text": df['text']})
    #     df.groupby(by='sentiment').count()['text'].plot.pie(autopct="%0.4f%%", subplots=True)
    #     plt.savefig("../upload/1.jpg")
    #     plt.show()
    #     # Store the new DataFrame as a MySQL table, without the index column
    #     # df.to_sql(name='lallala', con=engine, index=False)
    #
    #     print('Read from and write to Mysql table successfully!')
    #     return render(request, "haha.html", locals())


class Uploads(View):
    def get(self, request):
        show = Image.objects.all()
        return render(request, "haha.html", locals())
haha.py
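The views above import an `Image` model from web.models, which the post never shows. A minimal sketch of what it presumably looks like follows; only the `img` field is implied by the `Image(img=...)` call, and the field type and length are assumptions.

# models.py -- hypothetical reconstruction of the Image model used above
from django.db import models

class Image(models.Model):
    # Path of the uploaded file relative to the site, e.g. "/upload/a.jpg"
    img = models.CharField(max_length=255)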