Skip to content

Commit d3c9f58

Browse files
author
indra-ipd
committed
add app.py
1 parent 2eeb6a0 commit d3c9f58

File tree

2 files changed

+33
-12
lines changed

2 files changed

+33
-12
lines changed

app.py

Lines changed: 5 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -130,7 +130,11 @@ def evaluate_and_log(models, dataset, task_type, eval_output):
130130

131131
# Load images for selection
132132
def load_image(path):
133-
return Image.open(smiles_image_mapping[path]["image"])# Image.1open(path)
133+
try:
134+
return Image.open(smiles_image_mapping[path]["image"])# Image.1open(path)
135+
except:
136+
pass
137+
134138

135139

136140
# Function to handle image selection

models/fm4m.py

Lines changed: 28 additions & 11 deletions
Original file line numberDiff line numberDiff line change
@@ -328,14 +328,19 @@ def single_modal(model,dataset=None, downstream_model=None,params=None, x_train=
328328
reducer = umap.UMAP(metric='euclidean', n_neighbors=10, n_components=2, low_memory=True, min_dist=0.1,
329329
verbose=False)
330330
n_samples = np.minimum(1000, len(x_batch))
331-
features_umap = reducer.fit_transform(x_batch[:n_samples])
331+
332332
try:x = y_batch.values[:n_samples]
333333
except: x = y_batch[:n_samples]
334334
index_0 = [index for index in range(len(x)) if x[index] == 0]
335335
index_1 = [index for index in range(len(x)) if x[index] == 1]
336336

337-
class_0 = features_umap[index_0]
338-
class_1 = features_umap[index_1]
337+
try:
338+
features_umap = reducer.fit_transform(x_batch[:n_samples])
339+
class_0 = features_umap[index_0]
340+
class_1 = features_umap[index_1]
341+
except:
342+
class_0 = []
343+
class_1 = []
339344
print("Generating latent plots : Done")
340345

341346
#vizualize(roc_auc,fpr, tpr, x_batch, y_batch )
@@ -361,16 +366,23 @@ def single_modal(model,dataset=None, downstream_model=None,params=None, x_train=
361366
print("Generating latent plots")
362367
reducer = umap.UMAP(metric='euclidean', n_neighbors= 10, n_components=2, low_memory=True, min_dist=0.1, verbose=False)
363368
n_samples = np.minimum(1000,len(x_batch))
364-
features_umap = reducer.fit_transform(x_batch[:n_samples])
369+
365370
try:
366371
x = y_batch.values[:n_samples]
367372
except:
368373
x = y_batch[:n_samples]
369-
index_0 = [index for index in range(len(x)) if x[index] == 0]
370-
index_1 = [index for index in range(len(x)) if x[index] == 1]
371374

372-
class_0 = features_umap[index_0]
373-
class_1 = features_umap[index_1]
375+
try:
376+
features_umap = reducer.fit_transform(x_batch[:n_samples])
377+
index_0 = [index for index in range(len(x)) if x[index] == 0]
378+
index_1 = [index for index in range(len(x)) if x[index] == 1]
379+
380+
class_0 = features_umap[index_0]
381+
class_1 = features_umap[index_1]
382+
except:
383+
class_0 = []
384+
class_1 = []
385+
374386
print("Generating latent plots : Done")
375387

376388
#vizualize(roc_auc,fpr, tpr, x_batch, y_batch )
@@ -395,14 +407,19 @@ def single_modal(model,dataset=None, downstream_model=None,params=None, x_train=
395407
reducer = umap.UMAP(metric='euclidean', n_neighbors=10, n_components=2, low_memory=True, min_dist=0.1,
396408
verbose=False)
397409
n_samples = np.minimum(1000, len(x_batch))
398-
features_umap = reducer.fit_transform(x_batch[:n_samples])
410+
399411
try: x = y_batch.values[:n_samples]
400412
except: x = y_batch[:n_samples]
401413
#index_0 = [index for index in range(len(x)) if x[index] == 0]
402414
#index_1 = [index for index in range(len(x)) if x[index] == 1]
403415

404-
class_0 = features_umap#[index_0]
405-
class_1 = features_umap#[index_1]
416+
try:
417+
features_umap = reducer.fit_transform(x_batch[:n_samples])
418+
class_0 = features_umap#[index_0]
419+
class_1 = features_umap#[index_1]
420+
except:
421+
class_0 = []
422+
class_1 = []
406423
print("Generating latent plots : Done")
407424

408425
return result, RMSE_score,y_batch_test, y_prob, class_0, class_1

0 commit comments

Comments (0)