import os
import math
import numpy as np

def load_map_from_csv(path):
    return np.loadtxt(path, dtype='int64', delimiter=',')

def get_file_path(directory_path, reverse=False):
    entries = os.listdir(directory_path)
    file_names = sorted([f for f in entries if os.path.isfile(os.path.join(directory_path, f))],
                        reverse=reverse)
    return [os.path.abspath(os.path.join(directory_path, f)) for f in file_names]

# Resize with nearest-neighbor interpolation
def resize_nearest(src, h, w):
    # Allocate the output array (contents are uninitialised)
    dst = np.empty((h, w))

    # Get the size of the source image
    hi, wi = src.shape[0], src.shape[1]

    # Compute the scaling factor along each axis
    ax = w / float(wi)
    ay = h / float(hi)

    # Nearest-neighbor interpolation
    for y in range(0, h):
        for x in range(0, w):
            xi, yi = int(round(x / ax)), int(round(y / ay))
            # Clamp coordinates that fall outside the source image
            if xi > wi - 1: xi = wi - 1
            if yi > hi - 1: yi = hi - 1

            dst[y, x] = src[yi, xi]

    return dst
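
# Illustrative note (not part of the original module): with the index rounding
# above, upscaling a 2x2 array to 4x4 repeats each source cell in a 2x2 block,
# e.g.
#   resize_nearest(np.array([[1, 2], [3, 4]]), 4, 4)
#   -> [[1, 1, 2, 2], [1, 1, 2, 2], [3, 3, 4, 4], [3, 3, 4, 4]]  (as a float array)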

def noize(s, mu=0):
    # Draw a sample from a logistic distribution by applying the inverse
    # transform method to a uniform random variable
    np.random.seed()  # re-seed the global RNG from OS entropy on every call
    N = 1
    U = np.random.uniform(0.0, 1.0, N)
    # Transform with the inverse CDF (quantile function) of the logistic distribution
    V = mu + s * np.log(U / (1 - U))
    return V[0]
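
# Note (illustrative, not part of the original module): V above applies the
# logistic quantile function F^{-1}(u) = mu + s * ln(u / (1 - u)) to
# U ~ Uniform(0, 1), so the result is distributed identically to
# np.random.logistic(loc=mu, scale=s).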

def calc_entropy(count_map, conner_points, k=1):
    total_count = np.sum(count_map)
    entropy = 0
    for conner_point in conner_points:
        p = count_map[conner_point.y, conner_point.x] / total_count
        # Cells with probability 0 contribute zero information
        if p > 0:
            entropy -= p * math.log(p)

    return entropy * k
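
# Minimal usage sketch (illustrative assumptions only: the map values, its size
# and the corner coordinates below are made up, and the file-based helpers are
# skipped because they need data on disk).
if __name__ == '__main__':
    from collections import namedtuple

    # Any object exposing .x and .y attributes works as a corner point.
    Point = namedtuple('Point', ['x', 'y'])

    # Hypothetical 4x4 visit-count map in place of a CSV loaded from disk.
    count_map = np.array([[4, 0, 0, 0],
                          [0, 2, 0, 0],
                          [0, 0, 1, 0],
                          [0, 0, 0, 1]])

    # Upscale the map to 8x8 with nearest-neighbor interpolation.
    resized = resize_nearest(count_map, 8, 8)
    print(resized.shape)  # (8, 8)

    # Entropy of the visit distribution evaluated at the four map corners.
    corners = [Point(0, 0), Point(3, 0), Point(0, 3), Point(3, 3)]
    print(calc_entropy(count_map, corners))

    # One logistic noise sample with scale 0.5 around location 0.
    print(noize(0.5))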