[go: nahoru, domu]

Skip to content

Commit

Permalink
Remove useless dirs
Browse files Browse the repository at this point in the history
  • Loading branch information
slowmoyang committed Nov 27, 2017
1 parent c8e41aa commit c790d63
Show file tree
Hide file tree
Showing 7 changed files with 696 additions and 0 deletions.
Binary file not shown.
34 changes: 34 additions & 0 deletions SJ-Keras/Proj-ChannelImportance/Untitled.ipynb
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"collapsed": true
},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.13"
}
},
"nbformat": 4,
"nbformat_minor": 2
}
131 changes: 131 additions & 0 deletions SJ-Keras/Proj-ChannelImportance/evaluation.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,131 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import os.path

import argparse
from datetime import datetime
from tqdm import tqdm
#import numpy as np
import matplotlib as mpl
mpl.use('Agg')

#import tensorflow as tf

import keras
#from keras import (
# optimizers,
# losses,
# metrics)
from keras.models import load_model
from keras.utils import multi_gpu_model

#import keras.backend as K

from models import add_an_sigmoid_layer
from pipeline import DataLodaer

#from custom_losses import binary_cross_entropy_with_logits
#from custom_metrics import accuracy_with_logits
from meters import ROCMeter, OutHist
from utils import (
get_log_dir,
get_saved_model_paths
)



def evaluate(saved_model_path,
             step,
             log_dir):
    """Evaluate one saved checkpoint on the train, dijet and Z+jet datasets.

    Loads a logit-output Keras model from ``saved_model_path``, appends a
    sigmoid layer, replicates it across 2 GPUs, then fills output
    histograms for all three datasets and ROC curves for the two test
    sets, writing everything under ``log_dir``.

    Args:
        saved_model_path: path to a saved Keras model whose outputs are logits.
        step: training step of the checkpoint; used to tag output files.
        log_dir: log-directory object exposing ``.path``,
            ``.output_histogram.path``, ``.roc.path`` and item access for
            dataset paths (e.g. ``logger["train_data"]``).
    """
    # TEST
    # NOTE(review): `Logger` is not imported anywhere in this file — this
    # line raises NameError at runtime. Presumably it should be imported
    # from the same module as get_log_dir; confirm and fix the import.
    logger = Logger(log_dir.path, "READ")

    # Saved model emits logits; wrap it so predictions are probabilities.
    model_logit = load_model(saved_model_path)
    model_sigmoid = add_an_sigmoid_layer(model_logit)

    # Hard-coded 2-GPU inference replica.
    model = multi_gpu_model(model_sigmoid, 2)

    # One histogram file per dataset, tagged with the checkpoint step.
    out_hist = OutHist(
        dpath=log_dir.output_histogram.path,
        step=step,
        dname_list=["train", "test_dijet", "test_zjet"])

    # on training data
    # NOTE(review): `DataLodaer` (sic) is the name imported from pipeline,
    # but pipeline.py defines `DataLoader` and its __init__ also requires a
    # `channel` argument — these calls look out of sync; confirm.
    train_data_loader = DataLodaer(
        path=logger["train_data"],
        batch_size=1000,
        cyclic=False)

    for x, y in train_data_loader:
        preds = model.predict_on_batch(x)
        out_hist.fill(dname="train", labels=y, preds=preds)

    # Test on dijet dataset
    test_dijet_loader = DataLodaer(
        path=logger["val_dijet_data"],
        batch_size=1000,
        cyclic=False)

    roc_dijet = ROCMeter(
        dpath=log_dir.roc.path,
        step=step,
        title="Test on Dijet",
        prefix="dijet_"
    )

    for x, y in test_dijet_loader:
        preds = model.predict_on_batch(x)

        roc_dijet.append(labels=y, preds=preds)
        out_hist.fill(dname="test_dijet", labels=y, preds=preds)

    # finish() presumably flushes/writes the ROC curve to disk — confirm.
    roc_dijet.finish()

    # Test on Z+jet dataset
    test_zjet_loader = DataLodaer(
        path=logger["val_zjet_data"],
        batch_size=1000,
        cyclic=False)

    roc_zjet = ROCMeter(
        dpath=log_dir.roc.path,
        step=step,
        title="Test on Z+jet",
        prefix="zjet_"
    )

    for x, y in test_zjet_loader:
        preds = model.predict_on_batch(x)
        roc_zjet.append(labels=y, preds=preds)
        out_hist.fill("test_zjet", labels=y, preds=preds)

    roc_zjet.finish()

    out_hist.finish()


def main():
    """CLI entry point: evaluate every checkpoint found under --log_dir."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument(
        '--log_dir', type=str, required=True,
        help='the directory path of dataset')
    parsed = arg_parser.parse_args()

    log_dir = get_log_dir(path=parsed.log_dir, creation=False)

    # Each entry is a (model_path, training_step) pair.
    checkpoints = get_saved_model_paths(log_dir.saved_models.path)
    total = len(checkpoints)
    for index, (model_path, ckpt_step) in enumerate(checkpoints):
        banner = "\n\n\n[{i}/{total}]: {path}".format(
            i=index, total=total, path=model_path)
        print(banner)
        evaluate(model_path, ckpt_step, log_dir)


if __name__ == "__main__":
    main()
104 changes: 104 additions & 0 deletions SJ-Keras/Proj-ChannelImportance/pipeline.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,104 @@
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import ROOT
import numpy as np


class DataLoader(object):
    """Batched loader for single-channel jet images stored in a ROOT TTree.

    Each entry of the "jet" tree holds a flattened multi-channel image;
    only the slice belonging to the requested channel is kept and reshaped
    to ``image_shape``.
    """

    def __init__(self, path, channel, batch_size, cyclic, image_shape=(1, 33, 33)):
        """
        Args:
            path: path to the input ROOT file.
            channel: which image channel to load — one of "cpt", "npt",
                "cmu" (case-insensitive).
            batch_size: number of examples per batch.
            cyclic: if True, ``next()`` wraps around the dataset forever;
                if False it raises StopIteration at the end.
            image_shape: (channels, height, width) of one output image.

        Raises:
            ValueError: if ``channel`` is not one of the known names.
        """
        self.path = path
        self.channel = channel
        self.batch_size = batch_size
        self.cyclic = cyclic
        self._image_shape = image_shape

        # BUG FIX: the original unpacked the misspelled name `image_shpae`,
        # which raised NameError on every construction.
        c, h, w = image_shape

        # The flat image vector is [cpt | npt | cmu], each h*w long.
        channel = channel.lower()
        if channel == "cpt":
            self._slicing = slice(0, h * w)
        elif channel == "npt":
            self._slicing = slice(h * w, 2 * h * w)
        elif channel == "cmu":
            self._slicing = slice(2 * h * w, 3 * h * w)
        else:
            # BUG FIX: the original raised ValueError("") with no message.
            raise ValueError(
                "channel must be one of 'cpt', 'npt' or 'cmu', "
                "but got '{}'".format(self.channel))

        self.root_file = ROOT.TFile(path, "READ")
        self.tree = self.root_file.Get("jet")

        # Cursor used by next() for sequential/cyclic batching.
        self._start = 0

    def __len__(self):
        """Number of entries (jets) in the tree."""
        return int(self.tree.GetEntries())

    def _get_data(self, idx):
        """Return (image, label) for a single tree entry.

        The image is the channel slice reshaped to ``image_shape``
        (float32); the label is ``label[1]`` as int64.
        """
        self.tree.GetEntry(idx)
        image = np.array(self.tree.image, dtype=np.float32)
        image = image[self._slicing].reshape(self._image_shape)
        # NOTE(review): label[1] is assumed to be the signal flag — confirm
        # against the tree-writing code.
        label = np.int64(self.tree.label[1])
        return (image, label)

    def __getitem__(self, key):
        """Index with an int → (image, label); with a slice → (x, y) arrays.

        Raises:
            IndexError: for an out-of-range integer index.
            TypeError: for any other key type.
        """
        if isinstance(key, int):
            if key < 0 or key >= len(self):
                raise IndexError
            return self._get_data(key)
        elif isinstance(key, slice):
            x = []
            y = []
            # `range` instead of the Python-2-only `xrange`: equivalent for
            # iteration and keeps the module importable on Python 3.
            for idx in range(*key.indices(len(self))):
                image, label = self._get_data(idx)
                x.append(image)
                y.append(label)
            x = np.array(x)
            y = np.array(y)
            return (x, y)
        else:
            raise TypeError

    def next(self):
        """Return the next (x, y) batch; wraps around when ``cyclic``."""
        if self.cyclic:
            if self._start + 1 < len(self):
                end = self._start + self.batch_size
                slicing = slice(self._start, end)
                if end <= len(self):
                    self._start = end
                    return self[slicing]
                else:
                    # Batch straddles the end of the dataset: take the tail,
                    # wrap to the front, and concatenate the two pieces.
                    x, y = self[slicing]

                    self._start = 0
                    end = end - len(self)

                    x1, y1 = self[slice(self._start, end)]
                    self._start = end

                    # BUG FIX: np.append returns a new array; the original
                    # discarded the result and returned only the tail piece.
                    x = np.append(x, x1, axis=0)
                    y = np.append(y, y1, axis=0)
                    return x, y
            else:
                self._start = 0
                return self.next()
        else:
            if self._start + 1 < len(self):
                end = self._start + self.batch_size
                slicing = slice(self._start, end)
                self._start = end
                return self[slicing]
            else:
                raise StopIteration

    def __next__(self):
        # Python 3 iterator protocol delegates to next().
        return self.next()

    def __iter__(self):
        # Fresh pass over the whole dataset in batch_size chunks,
        # independent of the next() cursor.
        for start in range(0, len(self), self.batch_size):
            yield self[slice(start, start + self.batch_size)]



Loading

0 comments on commit c790d63

Please sign in to comment.