DataX 示范配置:使用 MySQLReader 读取 MySQL 表 base_province,通过 HDFSWriter 写入 HDFS(NameNode HA 集群)

{
  "job": {
    "content": [
      {
        "reader": {
          "name": "mysqlreader",
          "parameter": {
            "column": ["id","name","region_id","area_code","iso_code","iso_3166_2"],
            "connection": [
              {
                "jdbcUrl": ["jdbc:mysql://hadoop102:3306/business_db"],
                "table": ["base_province"]
              }
            ],
            "password": "123321",
            "username": "root",
            "where": "id>=3",
            "splitPk": ""
          }
        },
        "writer": {
          "name": "hdfswriter",
          "parameter": {
            "column": [
              {"name": "id","type": "bigint"},
              {"name": "name","type": "string"},
              {"name": "region_id","type": "string"},
              {"name": "area_code","type": "string"},
              {"name": "iso_code","type": "string"},
              {"name": "iso_3166_2","type": "string"}
            ],
            "compress": "gzip",
            "defaultFS":"hdfs://mycluster",
            "hadoopConfig":{
              "dfs.nameservices": "mycluster",
              "dfs.ha.namenodes.mycluster": "nn1,nn2,nn3",
              "dfs.namenode.rpc-address.mycluster.nn1": "hadoop102:8020",
              "dfs.namenode.rpc-address.mycluster.nn2": "hadoop103:8020",
              "dfs.namenode.rpc-address.mycluster.nn3": "hadoop104:8020",
              "dfs.client.failover.proxy.provider.mycluster": "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider"
            },
            "fieldDelimiter": "\t",
            "fileName": "baseprovince",
            "fileType": "text",
            "path": "/base_province",
            "writeMode": "append"
          }
        }
      }
    ],
    "setting": {
      "speed": {
        "channel": "1"
      }
    }
  }
}

参考文档:DataX/introduction.md at master · alibaba/DataX · GitHub(https://github.com/alibaba/DataX/blob/master/introduction.md)

posted @ 2022-08-26 22:46  Avery_rainys  阅读(243)  评论(0)    收藏  举报