import sys
import json
import pymongo
import datetime
from pymongo import MongoClient

# connect to the MongoDB cluster
client = MongoClient('mongodb://192.168.1.31:20000,192.168.1.34:20000')
db = client.RHY
collection = db.ST_RIVER_R

f = open("D:/bigdata/st_river_r.CSV")

# the first line is the header: STCD,TM,Z,Q,XSA,XSAVV,XSMXV,FLWCHRCD,WPTN,MSQMT,MSAMT,MSVMT
line = f.readline()
print(line)
fieldNames = line.split(',')

# start reading the data rows
line = f.readline()
count = 0
records = []
insertCount = 0

while line:
    count = count + 1
    fieldValues = line.split(',')
    # a valid row has all 12 columns and a non-empty station code
    if len(fieldValues) == 12 and fieldValues[0].strip() != '':
        insertObj = {}
        # STCD: station code
        STCD = fieldValues[0]
        insertObj['STCD'] = STCD
        # TM: observation time
        TM = fieldValues[1]
        if TM.strip() != '':
            TM = datetime.datetime.strptime(TM, '%Y-%m-%d %H:%M:%S')
            insertObj['TM'] = TM
        # Z: water level
        Z = fieldValues[2]
        if Z.strip() != '':
            Z = float(Z)
            insertObj['Z'] = Z
        # Q: discharge
        Q = fieldValues[3]
        if Q.strip() != '':
            Q = float(Q)
            insertObj['Q'] = Q
        # XSA
        XSA = fieldValues[4]
        if XSA.strip() != '':
            XSA = float(XSA)
            insertObj['XSA'] = XSA
        # XSAVV
        XSAVV = fieldValues[5]
        if XSAVV.strip() != '':
            XSAVV = float(XSAVV)
            insertObj['XSAVV'] = XSAVV
        # XSMXV
        XSMXV = fieldValues[6]
        if XSMXV.strip() != '':
            XSMXV = float(XSMXV)
            insertObj['XSMXV'] = XSMXV
        # FLWCHRCD
        FLWCHRCD = fieldValues[7]
        if FLWCHRCD.strip() != '':
            insertObj['FLWCHRCD'] = FLWCHRCD
        # WPTN
        WPTN = fieldValues[8]
        if WPTN.strip() != '':
            insertObj['WPTN'] = WPTN
        # MSQMT
        MSQMT = fieldValues[9]
        if MSQMT.strip() != '':
            insertObj['MSQMT'] = MSQMT
        # MSAMT
        MSAMT = fieldValues[10]
        if MSAMT.strip() != '':
            insertObj['MSAMT'] = MSAMT
        # MSVMT: last column, so strip the trailing newline
        MSVMT = fieldValues[11].strip()
        if MSVMT != '':
            insertObj['MSVMT'] = MSVMT
        # collection.insert_one(insertObj)
        records.append(insertObj)
        if len(records) == 1000:
            insertCount = insertCount + 1
            # only write batches past this row count (apparently resuming an earlier partial load)
            if count > 1451000:
                collection.insert_many(records)
                print(str(count) + '  ' + str(insertCount))
            print(count)
            records = []
    else:
        # report rows that do not match the expected layout
        print(line)
    line = f.readline()

# flush the records left over from the last incomplete batch
if len(records) > 0 and count > 1451000:
    collection.insert_many(records)

f.close()
client.close()
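For reference, the same load can be written more compactly with Python's built-in csv module. The sketch below is only an illustration: it assumes the same connection string, database, collection, batch size and column layout as the script above, and lets DictReader take care of quoting and trailing newlines.

import csv
import datetime
from pymongo import MongoClient

# illustrative sketch only; connection string and file path assumed from the script above
client = MongoClient('mongodb://192.168.1.31:20000,192.168.1.34:20000')
collection = client.RHY.ST_RIVER_R

FLOAT_FIELDS = {'Z', 'Q', 'XSA', 'XSAVV', 'XSMXV'}

with open('D:/bigdata/st_river_r.CSV', newline='') as f:
    reader = csv.DictReader(f)      # the first row supplies the field names
    batch = []
    for row in reader:
        doc = {}
        for key, value in row.items():
            if not isinstance(value, str) or value.strip() == '':
                continue            # skip empty or missing cells, like the original script
            value = value.strip()
            if key == 'TM':
                doc[key] = datetime.datetime.strptime(value, '%Y-%m-%d %H:%M:%S')
            elif key in FLOAT_FIELDS:
                doc[key] = float(value)
            else:
                doc[key] = value
        batch.append(doc)
        if len(batch) == 1000:      # same batch size as the original script
            collection.insert_many(batch)
            batch = []
    if batch:                       # flush the last partial batch
        collection.insert_many(batch)

client.close()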

------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------

import sys
import json
import math
import copy
import pymongo
import datetime
from pymongo import MongoClient
import shapefile
import pymysql

# read the flight-path polylines from the shapefile
sf = shapefile.Reader(r'E:/Ambari/ubuntu/mapdata/aircraftPositionLine50.shp')
fields = sf.fields
shapes = sf.shapes()
count = len(shapes)
print('count: ' + str(count))

# collect the attribute field names, skipping the DeletionFlag entry at index 0
fieldName = []
for index in range(len(fields)):
    if index > 0:
        field = fields[index]
        # print(field)
        fieldName.append(field[0])
#print(fieldName)

# connect to MySQL (keyword arguments work across pymysql versions)
db = pymysql.connect(host="127.0.0.1", user="root", password="gis", database="acms")
cursor = db.cursor()

sql = "INSERT INTO airline_r(id, code, name, time_index, x, y, z, angle) VALUES (%s, %s, %s, %s, %s, %s, %s, %s)"

for index in range(count):
    preX = None
    preY = None
    preZ = None
    angle = None

    features = []
    record = sf.record(index)
    attribute = record[0:len(fields)]
    attribute[0] = index
    print(attribute)
    shap = shapes[index]
    points = shap.points
    pointCount = len(points)

    for i in range(pointCount):
        coordinate = shap.points[i]
        x = coordinate[0]
        y = coordinate[1]
        # note: pyshp stores z values separately in shap.z; entries of shap.points are (x, y), so this usually falls back to 0
        z = (0 if (len(coordinate) < 3) else coordinate[2])
        if preX is not None:
            # heading of the segment from the previous vertex to this one
            angle = math.atan2(y - preY, x - preX)
            feature = copy.deepcopy(attribute)
            feature.append(i - 1)
            feature.append(preX)
            feature.append(preY)
            feature.append(preZ)
            feature.append(angle)
            print(feature)
            features.append(tuple(feature))
            #cursor.execute(sql % tuple(feature))
            #cursor.execute(sql, feature)
        if i == pointCount - 1:
            # the last vertex keeps the heading of the final segment
            feature = copy.deepcopy(attribute)
            feature.append(i)
            feature.append(x)
            feature.append(y)
            feature.append(z)
            feature.append(angle)
            print(feature)
            features.append(tuple(feature))
            #cursor.execute(sql % tuple(feature))
            #cursor.execute(sql, feature)
        preX = x
        preY = y
        preZ = z
    #print(features)
    cursor.executemany(sql, features)
    db.commit()
    '''
    try:
        # execute the SQL statement
        cursor.executemany(sql, features)
        # commit the transaction to the database
        db.commit()
    except:
        # roll back on any error
        print()
        db.rollback()
    '''

# close the database connection
db.close()
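The airline_r table itself is not shown in the post. A hypothetical definition that matches the eight columns of the INSERT statement, plus a quick row-count check, could look like the sketch below; the column names come from the SQL above, while every type is an assumption.

import pymysql

# sketch only: the real table definition is not shown in the post, so the types are assumed
CREATE_SQL = """
CREATE TABLE IF NOT EXISTS airline_r (
    id         INT,
    code       VARCHAR(64),
    name       VARCHAR(128),
    time_index INT,
    x          DOUBLE,
    y          DOUBLE,
    z          DOUBLE,
    angle      DOUBLE
)
"""

db = pymysql.connect(host="127.0.0.1", user="root", password="gis", database="acms")
try:
    with db.cursor() as cursor:
        cursor.execute(CREATE_SQL)
        cursor.execute("SELECT COUNT(*) FROM airline_r")
        print("rows in airline_r:", cursor.fetchone()[0])
    db.commit()
finally:
    db.close()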

