Skip to content

Commit 8882198

Browse files
authored
Add files via upload
1 parent 877e25a commit 8882198

File tree

5 files changed

+121
-0
lines changed

5 files changed

+121
-0
lines changed

models/cvt_model.py

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
import tensorflow as tf
2+
from tensorflow.keras import layers
3+
from tensorflow.keras.applications import VGG16 # Placeholder for ViT
4+
5+
def build_vit_model(num_classes):
    """Build and compile an image classifier standing in for a ViT.

    NOTE(review): despite the name, this fine-tunes a VGG16 backbone as a
    placeholder -- replace with a real Vision Transformer implementation.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = tf.keras.Input(shape=(224, 224, 3))

    # Placeholder backbone; every layer is unfrozen for full fine-tuning.
    backbone = VGG16(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    features = layers.Flatten()(backbone.output)
    features = layers.Dropout(0.5)(features)
    probs = layers.Dense(num_classes, activation='softmax')(features)

    vit_model = tf.keras.Model(img_input, probs)
    vit_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return vit_model

models/efficientnet_model.py

+17
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import tensorflow as tf
2+
from tensorflow.keras import layers
3+
from tensorflow.keras.applications import EfficientNetB0
4+
5+
def build_efficientnet_model(num_classes):
    """Build and compile an EfficientNetB0 classifier for full fine-tuning.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = tf.keras.Input(shape=(224, 224, 3))

    # ImageNet-pretrained backbone, fully unfrozen.
    backbone = EfficientNetB0(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    # Classification head: pool -> normalize -> regularize -> predict.
    features = layers.GlobalAveragePooling2D()(backbone.output)
    features = layers.BatchNormalization()(features)
    features = layers.Dropout(0.4)(features)
    probs = layers.Dense(num_classes, activation="softmax")(features)

    effnet_model = tf.keras.Model(img_input, probs)
    effnet_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return effnet_model

models/model.py

+55
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,55 @@
1+
import tensorflow as tf
2+
from tensorflow.keras import layers
3+
from tensorflow.keras.applications import EfficientNetB0, ResNet50
4+
from tensorflow.keras.applications import ResNet50, VGG16
5+
from tensorflow.keras.layers import Input, Dense, GlobalAveragePooling2D
6+
7+
# EfficientNet model
8+
def build_efficientnet_model(num_classes):
    """Build and compile an EfficientNetB0 classifier with a named head.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = Input(shape=(224, 224, 3))

    # ImageNet-pretrained backbone, fully unfrozen for fine-tuning.
    backbone = EfficientNetB0(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    # Named head layers so they are easy to locate in model.summary().
    features = GlobalAveragePooling2D(name="avg_pool")(backbone.output)
    features = layers.BatchNormalization()(features)
    features = layers.Dropout(0.4, name="top_dropout")(features)
    probs = Dense(num_classes, activation="softmax", name="pred")(features)

    effnet_model = tf.keras.Model(img_input, probs)
    effnet_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return effnet_model
21+
22+
# ResNet model
23+
def build_resnet_model(num_classes):
    """Build and compile a ResNet50 classifier with a named head.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = Input(shape=(224, 224, 3))

    # ImageNet-pretrained backbone, fully unfrozen for fine-tuning.
    backbone = ResNet50(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    # Named head layers so they are easy to locate in model.summary().
    features = GlobalAveragePooling2D(name="avg_pool")(backbone.output)
    features = layers.BatchNormalization()(features)
    features = layers.Dropout(0.4, name="top_dropout")(features)
    probs = Dense(num_classes, activation="softmax", name="pred")(features)

    resnet = tf.keras.Model(img_input, probs)
    resnet.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return resnet
36+
37+
# Vision Transformer model (ViT)
38+
def build_vit_model(num_classes):
    """Build and compile an image classifier standing in for a ViT.

    NOTE(review): this uses a VGG16 backbone as a placeholder -- swap in
    an actual Vision Transformer from a compatible library.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    # Flatten/Dropout are only needed here, so import them locally.
    from tensorflow.keras.layers import Dense, Flatten, Dropout
    from tensorflow.keras.applications import VGG16

    img_input = Input(shape=(224, 224, 3))

    # Placeholder backbone; every layer is unfrozen for fine-tuning.
    backbone = VGG16(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    features = Flatten()(backbone.output)
    features = Dropout(0.5)(features)
    probs = Dense(num_classes, activation='softmax')(features)

    vit_model = tf.keras.Model(img_input, probs)
    vit_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return vit_model

models/resnet_model.py

+17
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,17 @@
1+
import tensorflow as tf
2+
from tensorflow.keras import layers
3+
from tensorflow.keras.applications import ResNet50
4+
5+
def build_resnet_model(num_classes):
    """Build and compile a ResNet50 classifier for full fine-tuning.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = tf.keras.Input(shape=(224, 224, 3))

    # ImageNet-pretrained backbone, fully unfrozen.
    backbone = ResNet50(include_top=False, input_tensor=img_input, weights='imagenet')
    backbone.trainable = True

    # Classification head: pool -> normalize -> regularize -> predict.
    features = layers.GlobalAveragePooling2D()(backbone.output)
    features = layers.BatchNormalization()(features)
    features = layers.Dropout(0.4)(features)
    probs = layers.Dense(num_classes, activation="softmax")(features)

    resnet = tf.keras.Model(img_input, probs)
    resnet.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return resnet

models/swin_model.py

+16
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
import tensorflow as tf
2+
from tensorflow.keras import layers
3+
4+
# Placeholder for Swin Transformer implementation
5+
def build_swin_model(num_classes):
    """Build and compile a placeholder for a Swin Transformer classifier.

    NOTE(review): no Swin architecture is implemented yet -- the raw input
    is flattened straight into the softmax head purely for demonstration.

    Args:
        num_classes: Number of target classes for the softmax head.

    Returns:
        A compiled ``tf.keras.Model`` taking (224, 224, 3) images.
    """
    img_input = tf.keras.Input(shape=(224, 224, 3))

    # TODO: implement the Swin Transformer architecture here.
    # Placeholder head operating directly on the flattened input.
    features = layers.Flatten()(img_input)
    features = layers.Dropout(0.5)(features)
    probs = layers.Dense(num_classes, activation='softmax')(features)

    swin_model = tf.keras.Model(img_input, probs)
    swin_model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
    return swin_model

0 commit comments

Comments
 (0)