日韩性视频-久久久蜜桃-www中文字幕-在线中文字幕av-亚洲欧美一区二区三区四区-撸久久-香蕉视频一区-久久无码精品丰满人妻-国产高潮av-激情福利社-日韩av网址大全-国产精品久久999-日本五十路在线-性欧美在线-久久99精品波多结衣一区-男女午夜免费视频-黑人极品ⅴideos精品欧美棵-人人妻人人澡人人爽精品欧美一区-日韩一区在线看-欧美a级在线免费观看

歡迎訪問 生活随笔!

生活随笔

當前位置: 首頁 > 运维知识 > 数据库 >内容正文

数据库

mapreduce mysql_MapReduce直接连接MySQL获取数据

發布時間:2024/4/11 数据库 34 豆豆
生活随笔 收集整理的這篇文章主要介紹了 mapreduce mysql_MapReduce直接连接MySQL获取数据 小編覺得挺不錯的,現在分享給大家,幫大家做個參考.

MySQL中數據:

mysql> select * from linuxidc_tbls;

+--------------------------+----------------+
| TBL_NAME                 | TBL_TYPE       |
+--------------------------+----------------+
| linuxidc_test_table      | EXTERNAL_TABLE |
| linuxidc_t               | MANAGED_TABLE  |
| linuxidc_t1              | MANAGED_TABLE  |
| tt                       | MANAGED_TABLE  |
| tab_partition            | MANAGED_TABLE  |
| linuxidc_hbase_table_1   | MANAGED_TABLE  |
| linuxidc_hbase_user_info | MANAGED_TABLE  |
| t                        | EXTERNAL_TABLE |
| linuxidc_jobid           | MANAGED_TABLE  |
+--------------------------+----------------+

9 rows in set (0.01 sec)

mysql> select * from linuxidc_tbls where TBL_NAME like 'linuxidc%' order by TBL_NAME;

+--------------------------+----------------+
| TBL_NAME                 | TBL_TYPE       |
+--------------------------+----------------+
| linuxidc_hbase_table_1   | MANAGED_TABLE  |
| linuxidc_hbase_user_info | MANAGED_TABLE  |
| linuxidc_jobid           | MANAGED_TABLE  |
| linuxidc_t               | MANAGED_TABLE  |
| linuxidc_t1              | MANAGED_TABLE  |
| linuxidc_test_table      | EXTERNAL_TABLE |
+--------------------------+----------------+

6 rows in set (0.00 sec)

MapReduce程序代碼,ConnMysql.java:

package com.linuxidc.study;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.URI;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.util.Iterator;

// FIX: "org.apache.Hadoop" -> "org.apache.hadoop" — Java package names are
// case-sensitive; the capitalized form does not resolve and breaks compilation.
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.db.DBConfiguration;
import org.apache.hadoop.mapreduce.lib.db.DBInputFormat;
import org.apache.hadoop.mapreduce.lib.db.DBWritable;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Example MapReduce job that reads rows directly from a MySQL table
 * ({@code linuxidc_tbls} in the Hive metastore DB) via {@link DBInputFormat}
 * and writes (TBL_NAME, TBL_TYPE) pairs to HDFS.
 */
public class ConnMysql {

    // Shared job configuration, populated once from the local client-side
    // Hadoop config files before any job object is built.
    private static Configuration conf = new Configuration();

    static {
        conf.addResource(new Path("F:/linuxidc-hadoop/hdfs-site.xml"));
        conf.addResource(new Path("F:/linuxidc-hadoop/mapred-site.xml"));
        conf.addResource(new Path("F:/linuxidc-hadoop/core-site.xml"));
        conf.set("mapred.job.tracker", "10.133.103.21:50021");
    }

    /**
     * One row of the {@code linuxidc_tbls} table.
     * Implements {@link DBWritable} for JDBC (de)serialization and
     * {@link Writable} for Hadoop's wire format; both interfaces must read and
     * write the fields in the same column order (tbl_name, tbl_type).
     */
    public static class TblsRecord implements Writable, DBWritable {

        String tbl_name;
        String tbl_type;

        /** No-arg constructor required by Hadoop's reflective instantiation. */
        public TblsRecord() {
        }

        @Override
        public void write(PreparedStatement statement) throws SQLException {
            statement.setString(1, this.tbl_name);
            statement.setString(2, this.tbl_type);
        }

        @Override
        public void readFields(ResultSet resultSet) throws SQLException {
            this.tbl_name = resultSet.getString(1);
            this.tbl_type = resultSet.getString(2);
        }

        @Override
        public void write(DataOutput out) throws IOException {
            Text.writeString(out, this.tbl_name);
            Text.writeString(out, this.tbl_type);
        }

        @Override
        public void readFields(DataInput in) throws IOException {
            this.tbl_name = Text.readString(in);
            this.tbl_type = Text.readString(in);
        }

        @Override
        public String toString() {
            // FIX: dropped redundant "new String(...)" wrapper around an
            // expression that is already a String.
            return this.tbl_name + " " + this.tbl_type;
        }
    }

    /**
     * Emits (tbl_name, tbl_type) for every DB row.
     * FIX: the original extended the raw {@code Mapper} type, so its typed
     * {@code map(LongWritable, TblsRecord, Context)} was a non-overriding
     * overload and the framework ran the default identity map instead.
     * Parameterizing the superclass makes it a real override.
     */
    public static class ConnMysqlMapper
            extends Mapper<LongWritable, TblsRecord, Text, Text> {

        @Override
        public void map(LongWritable key, TblsRecord values, Context context)
                throws IOException, InterruptedException {
            context.write(new Text(values.tbl_name), new Text(values.tbl_type));
        }
    }

    /**
     * Pass-through reducer: writes every (name, type) pair unchanged.
     * FIX: same raw-type problem as the mapper — the typed reduce() never
     * overrode {@code Reducer.reduce} until the superclass was parameterized.
     */
    public static class ConnMysqlReducer
            extends Reducer<Text, Text, Text, Text> {

        @Override
        public void reduce(Text key, Iterable<Text> values, Context context)
                throws IOException, InterruptedException {
            for (Text value : values) {
                context.write(key, value);
            }
        }
    }

    /**
     * Configures and submits the job; exits 0 on success, 1 on failure.
     */
    public static void main(String[] args) throws Exception {
        Path output = new Path("/user/linuxidc/output/");
        FileSystem fs = FileSystem.get(URI.create(output.toString()), conf);
        if (fs.exists(output)) {
            // FIX: delete(Path) is deprecated; use the recursive two-arg form.
            fs.delete(output, true);
        }
        // Ship the MySQL JDBC driver (already on HDFS) to the task classpath.
        DistributedCache.addFileToClassPath(new Path(
                "hdfs://hd022-test.nh.sdo.com/user/liuxiaowen/mysql-connector-java-5.1.13-bin.jar"), conf);
        DBConfiguration.configureDB(conf, "com.mysql.jdbc.Driver",
                "jdbc:mysql://10.133.103.22:3306/hive", "hive", "hive");
        Job job = new Job(conf, "test mysql connection");
        job.setJarByClass(ConnMysql.class);
        job.setMapperClass(ConnMysqlMapper.class);
        job.setReducerClass(ConnMysqlReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(Text.class);
        job.setInputFormatClass(DBInputFormat.class);
        FileOutputFormat.setOutputPath(job, output);
        // Columns to read from the table, in TblsRecord field order.
        String[] fields = { "TBL_NAME", "TBL_TYPE" };
        // setInput args: job, DBWritable class, table name, WHERE condition,
        // ORDER BY column, column names.
        DBInputFormat.setInput(job, TblsRecord.class,
                "linuxidc_tbls", "TBL_NAME like 'linuxidc%'", "TBL_NAME", fields);
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}

運行結果:

[www.linuxidc.com@linux ~]$ hadoop fs -cat /user/linuxidc/output/part-r-00000

linuxidc_hbase_table_1	MANAGED_TABLE

linuxidc_hbase_user_info	MANAGED_TABLE

linuxidc_jobid	MANAGED_TABLE

linuxidc_t	MANAGED_TABLE

linuxidc_t1	MANAGED_TABLE

linuxidc_test_table	EXTERNAL_TABLE

推薦閱讀:

總結

以上是生活随笔為你收集整理的mapreduce mysql_MapReduce直接连接MySQL获取数据的全部內容,希望文章能夠幫你解決所遇到的問題。

如果覺得生活随笔網站內容還不錯,歡迎將生活随笔推薦給好友。