end0tknr's kipple - web写経開発

太宰府天満宮の狛犬って、妙にカワイイ

pypyodbc for python & win で、AS400のDB2 for i へodbc接続し、DDL等を出力

以下のpython scriptで、AS400のDB2 for i にある 各db tableのテーブル定義やカラム定義の他、最終更新日時等の統計情報を TSV形式で出力できます

#!/usr/bin/python
# -*- coding: utf-8 -*-

# cf. https://qiita.com/KSD2015/items/fad9a443130b1a4f0c73
import pypyodbc
import csv
import datetime

# Connection settings for every AS400/DB2-for-i host to be scanned.
# "????" and "10.209.???.???" are placeholders: fill in real schema
# names, host IP and credentials before running this script.
db_conf = {
    "driver":"{iSeries Access ODBC Driver}",
    "hosts" : [
        {"name":"TOKYO","schemas":["????","????"],
         "ip":"10.209.???.???","uid" :"????","pwd":"????"},
    ]
}

def main():
    """Dump table statistics and column definitions for every configured host."""
    for host_conf in db_conf["hosts"]:
        stat_as400_db2(host_conf)
        

def stat_as400_db2(as400):
    """Connect to one AS400 host and export its catalog info to TSV files.

    Writes one "tbl_stats_*" TSV with table definitions/statistics for the
    configured schemas, plus one "cols_*" TSV per table with its column
    definitions.

    as400: one entry of db_conf["hosts"] (keys: name, ip, uid, pwd, schemas).
    """
    print( as400["name"] )

    db_con = pypyodbc.connect(driver=db_conf["driver"],
                              system=as400["ip"],
                              uid   =as400["uid"],
                              pwd   =as400["pwd"] )
    print( "CONNECT DONE" )

    # FIX: the original never closed the connection (resource leak);
    # try/finally guarantees it is released even when a query fails.
    try:
        # Table definitions and statistics for the configured schemas.
        tbl_stats, stats_keys = get_tbl_def_stats(db_con, as400["schemas"])

        # Date stamp shared by every output filename (hoisted: the original
        # recomputed it per table, risking a midnight rollover mid-run).
        today = datetime.datetime.now().strftime("%Y%m%d")

        tsv_basename = "_".join(["tbl_stats", as400["name"], today])
        save_to_tsv( tsv_basename, tbl_stats, stats_keys )

        # Column definitions: one TSV per table.
        for tbl_def in tbl_stats:
            tbl_cols, col_keys = \
                get_tbl_cols(db_con, tbl_def["table_schema"], tbl_def["table_name"])
            tsv_basename = \
                "_".join(["cols",
                          as400["name"],
                          tbl_def["table_schema"],
                          tbl_def["table_name"],
                          today])
            save_to_tsv( tsv_basename, tbl_cols, col_keys )
    finally:
        db_con.close()
    
    
def save_to_tsv( basename, tbl_rows, tbl_cols ):
    """Write rows of dicts to "<basename>.tsv" (header + tab-separated values).

    basename: output filename without extension (written to the cwd).
    tbl_rows: iterable of dicts keyed by the entries of tbl_cols.
    tbl_cols: ordered field names, used for the header row.
    Returns the path of the file written.
    """
    tsv_path = basename + ".tsv"

    # FIX: csv requires newline="" on the file handle; without it every
    # row ends in \r\r\n on Windows (the platform this script targets).
    with open(tsv_path, "w", encoding="utf-8", newline="") as f:
        writer = csv.DictWriter(f, fieldnames=tbl_cols, delimiter="\t")
        writer.writeheader()
        for tbl_row in tbl_rows:
            writer.writerow( tbl_row )
    return tsv_path
    
        
def fetch_db_cur(db_cur):
    """Drain an already-executed cursor into dicts keyed by column name.

    Returns (rows, column_names) where rows is a list of dicts and
    column_names preserves the cursor's column order.
    """
    col_keys = [desc[0] for desc in db_cur.description]
    rows = [dict(zip(col_keys, record)) for record in db_cur.fetchall()]
    return rows, col_keys
    
def get_tbl_cols(db_con, tbl_schema, tbl_name):
    """Fetch the column definitions of one table from QSYS2.SYSCOLUMNS.

    Returns (rows, column_names), ordered by the columns' ordinal position.
    """
    sql = """
SELECT  *
FROM QSYS2.SYSCOLUMNS
WHERE TABLE_SCHEMA=? AND TABLE_NAME =?
ORDER BY ORDINAL_POSITION
"""
    cursor = db_con.cursor()
    cursor.execute(sql, (tbl_schema, tbl_name,))
    return fetch_db_cur(cursor)
    
    
def get_tbl_schemas(db_con):
    """List every schema that owns at least one table, via QSYS2.SYSTABLES.

    Returns (rows, column_names) sorted by schema name.
    """
    sql = """
SELECT TBL.TABLE_SCHEMA
FROM QSYS2.SYSTABLES TBL
GROUP BY TBL.TABLE_SCHEMA
ORDER BY TBL.TABLE_SCHEMA
"""
    cursor = db_con.cursor()
    cursor.execute(sql)
    return fetch_db_cur(cursor)
    
def get_tbl_def_stats(db_con, schemas):
    """Fetch table definitions joined with per-table statistics.

    Joins QSYS2.SYSTABLES with QSYS2.SYSTABLESTAT for every schema in
    `schemas`, passing the schema names as bound parameters.
    Returns (rows, column_names) ordered by schema and table name.
    """
    sql = """
SELECT TBL.*, ST.*
FROM QSYS2.SYSTABLES TBL
JOIN QSYS2.SYSTABLESTAT ST
  ON (TBL.TABLE_SCHEMA=ST.TABLE_SCHEMA AND TBL.TABLE_NAME=ST.TABLE_NAME)
WHERE TBL.TABLE_SCHEMA IN (%s)
ORDER BY TBL.TABLE_SCHEMA, TBL.TABLE_NAME
"""
    # One "?" placeholder per schema, e.g. "?,?,?" for three schemas.
    placeholders = ",".join(["?"] * len(schemas))
    cursor = db_con.cursor()
    cursor.execute(sql % placeholders, tuple(schemas))
    return fetch_db_cur(cursor)
    
# Run the export only when executed as a script, not when imported.
if __name__ == '__main__':
    main()