国产探花免费观看_亚洲丰满少妇自慰呻吟_97日韩有码在线_资源在线日韩欧美_一区二区精品毛片,辰东完美世界有声小说,欢乐颂第一季,yy玄幻小说排行榜完本

首頁 > 編程 > Python > 正文

python hbase讀取數據發送kafka的方法

2020-02-16 00:21:37
字體:
來源:轉載
供稿:網友

本例子實現從hbase獲取數據,并發送kafka。

使用

#!/usr/bin/env python
# coding=utf-8
"""Read rows from HBase via the Thrift gateway and forward them to Kafka.

Scans a table with a SingleColumnValueFilter, serialises the matching rows
to JSON, and (optionally) publishes the result to a local Kafka topic.
"""
import sys
import time
import json

sys.path.append('/usr/local/lib/python3.5/site-packages')

from thrift import Thrift
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol
from hbase1 import Hbase  # HBase thrift1 generated client
from hbase1.ttypes import *
from kafka import KafkaConsumer
from kafka import KafkaProducer
from kafka.errors import KafkaError
import unittest


class HbaseOpreator:
    """Thin wrapper around an HBase Thrift connection.

    Opens a buffered binary-protocol Thrift transport to ``host:port`` on
    construction and closes it when the object is garbage-collected.
    """

    def __init__(self, host, port, table='test'):
        # Default table name; note scanTablefilter takes the table
        # explicitly, so this attribute is currently informational only.
        self.tableName = table
        self.transport = TTransport.TBufferedTransport(
            TSocket.TSocket(host, port))
        self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
        self.client = Hbase.Client(self.protocol)
        self.transport.open()

    def __del__(self):
        # Best-effort cleanup: close the Thrift transport when the
        # wrapper is collected.
        self.transport.close()

    def scanTablefilter(self, table, *args):
        """Scan ``table`` with a fixed name filter and return the rows.

        Returns the matching rows as a UTF-8 encoded JSON array of
        ``{key, name, age, phone}`` objects.

        NOTE(review): the scanner is never closed via scannerClose; the
        server-side scanner leaks until it times out — confirm whether the
        Thrift gateway reaps idle scanners in this deployment.
        """
        rows = []
        try:
            # Example filters (kept for reference):
            #   "PrefixFilter('123_')"
            #   "RowFilter(=,'regexstring:.aaa')"
            #   "SingleColumnValueFilter('f','statis_date',=,'binary:20170223')"
            scan = TScan()
            scan.filterString = (
                "SingleColumnValueFilter('info','name',=,'binary:lilei')"
                " OR SingleColumnValueFilter('info','name',=,'binary:lily')"
            )
            scanner_id = self.client.scannerOpenWithScan(table, scan, None)
            result = self.client.scannerGet(scanner_id)
            while result:
                for r in result:
                    # BUG FIX: the original reused a single dict across all
                    # iterations, so every element of the returned list was
                    # a reference to the same (last) row. Build a fresh dict
                    # per row instead.
                    rows.append({
                        'key': r.row,
                        'name': r.columns.get('info:name').value,
                        'age': r.columns.get('info:age').value,
                        'phone': r.columns.get('info:phone').value,
                    })
                result = self.client.scannerGet(scanner_id)
            return json.dumps(rows).encode(encoding="utf-8")
        finally:
            print("scan finish")


def sendKfafkaProduct(data):
    """Publish ``data`` to the local Kafka topic ``test``.

    First sends each element of ``data`` individually, then loops forever
    re-sending the whole payload every 5 seconds.

    NOTE(review): the trailing ``while True`` never terminates and resends
    the entire payload indefinitely — presumably demo/keep-alive code;
    confirm this is intentional before reuse.
    """
    producer = KafkaProducer(bootstrap_servers=['localhost:9092'])
    for d in data:
        producer.send('test', key=b'lxs', value=d)
        time.sleep(5)
        print(d)
    while True:
        producer.send('test', key=b'lxs', value=data)
        time.sleep(5)
        print(data)


if __name__ == '__main__':
    # unittest.main()
    B = HbaseOpreator('10.27.1.138', 9090)
    value = B.scanTablefilter('ns_lbi:test_hbase_student')
    print(value)
    # sendKfafkaProduct(value)

以上這篇python hbase讀取數據發送kafka的方法就是小編分享給大家的全部內容了,希望能給大家一個參考,也希望大家多多支持武林站長站。

發表評論 共有條評論
用戶名: 密碼:
驗證碼: 匿名發表
主站蜘蛛池模板: 梨树县| 五原县| 仪陇县| 宁晋县| 社旗县| 乃东县| 晴隆县| 攀枝花市| 环江| 舟曲县| 太保市| 沁水县| 新安县| 河津市| 台州市| 东港市| 临沂市| 黎川县| 滨州市| 舟曲县| 通许县| 墨江| 庆阳市| 淳安县| 兰考县| 高青县| 乌兰县| 台中县| 绍兴县| 汤阴县| 专栏| 万安县| 东台市| 开鲁县| 临江市| 怀化市| 通榆县| 光山县| 济源市| 海盐县| 崇义县|