Reputation: 87
I've been getting the error "module 'tensorflow' has no attribute 'get_default_graph'" every time I try to run my model with Keras, and I've tried pretty much everything suggested in previous answers. I'm attempting to create a 3D CNN using the Keras backend. It worked for the past few days, but yesterday the error started appearing every time I tried to build the model. Here's my code:
# importing important packages
import os
import numpy as np
import tensorflow as tf
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Flatten, Conv3D, MaxPooling3D, Dropout, BatchNormalization, LeakyReLU
from tensorflow.python.keras import backend as K
from keras.regularizers import l2
from sklearn.utils import compute_class_weight
#import dataset
import numpy as np
DATA_URL = '/content/drive/My Drive/icafiledata4.npz'
with np.load(DATA_URL) as data:
    X = data['arr_0']
    y = data['arr_1']
BATCH_SIZE = 128
input_shape=(64, 64, 40, 20)
# Create the model
model = Sequential()
model.add(Conv3D(64, kernel_size=(3,3,3), activation='relu', input_shape=input_shape, kernel_regularizer=l2(0.005), bias_regularizer=l2(0.005), data_format = 'channels_first', padding='same'))
model.add(MaxPooling3D(pool_size=(2, 2, 2)))
model.add(Conv3D(64, kernel_size=(3,3,3), activation='relu', input_shape=input_shape, kernel_regularizer=l2(0.005), bias_regularizer=l2(0.005), data_format = 'channels_first', padding='same'))
model.add(MaxPooling3D(pool_size=(2, 2, 2)))
model.add(BatchNormalization(center=True, scale=True))
model.add(Conv3D(64, kernel_size=(3,3,3), activation='relu', input_shape=input_shape, kernel_regularizer=l2(0.005), bias_regularizer=l2(0.005), data_format = 'channels_first', padding='same'))
model.add(MaxPooling3D(pool_size=(2, 2, 2)))
model.add(Conv3D(64, kernel_size=(3,3,3), activation='relu', input_shape=input_shape, kernel_regularizer=l2(0.005), bias_regularizer=l2(0.005), data_format = 'channels_first', padding='same'))
model.add(MaxPooling3D(pool_size=(2, 2, 2)))
model.add(BatchNormalization(center=True, scale=True))
model.add(Flatten())
model.add(BatchNormalization(center=True, scale=True))
model.add(Dense(128, activation='relu', kernel_regularizer=l2(0.01), bias_regularizer=l2(0.01)))
model.add(Dropout(0.5))
model.add(Dense(128, activation='relu', kernel_regularizer=l2(0.01), bias_regularizer=l2(0.01)))
model.add(Dense(1, activation='sigmoid', kernel_regularizer=l2(0.01), bias_regularizer=l2(0.01)))
# Compile the model
model.compile(optimizer = tf.keras.optimizers.Adam(lr=0.001), loss='binary_crossentropy', metrics=['accuracy'])
Does anyone have any tips? Thank you so much! Additional info: TensorFlow 2.2.0, Keras 2.3.0.
Upvotes: 0
Views: 1956
Reputation: 2795
Please try:
from tensorflow.keras.models import Sequential, Model
from tensorflow.keras.layers import Dense, Flatten, Conv3D, MaxPooling3D, Dropout, BatchNormalization, LeakyReLU
from tensorflow.keras.regularizers import l2
instead of:
import keras
from keras.models import Sequential, Model
from keras.layers import Dense, Flatten, Conv3D, MaxPooling3D, Dropout, BatchNormalization, LeakyReLU
from keras.regularizers import l2
TensorFlow 2.0 and above has Keras built in as tf.keras, so there is no need to load the standalone Keras package into your environment; just change the import statements.
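For reference, a minimal sketch of what the corrected script could look like. The layer arguments are copied from your question but trimmed to a single conv block for brevity, and the data loading is assumed unchanged; I also pass data_format to the pooling layer so it matches the conv layer.

# consistent tf.keras imports; no standalone keras anywhere in the script
import tensorflow as tf
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Flatten, Conv3D, MaxPooling3D, Dropout, BatchNormalization
from tensorflow.keras.regularizers import l2

model = Sequential()
# first conv block, arguments taken from the question
model.add(Conv3D(64, kernel_size=(3, 3, 3), activation='relu',
                 input_shape=(64, 64, 40, 20),
                 kernel_regularizer=l2(0.005), bias_regularizer=l2(0.005),
                 data_format='channels_first', padding='same'))
# data_format added here so pooling uses the same channel ordering as the conv layer
model.add(MaxPooling3D(pool_size=(2, 2, 2), data_format='channels_first'))
model.add(Flatten())
model.add(Dense(1, activation='sigmoid'))

# learning_rate is the current keyword name; lr still works as an alias in TF 2.2
model.compile(optimizer=tf.keras.optimizers.Adam(learning_rate=0.001),
              loss='binary_crossentropy', metrics=['accuracy'])

If the standalone keras 2.3.0 package is still installed in your environment, simply not importing it (as above) is enough; you don't have to uninstall it for tf.keras to work.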
Upvotes: 2