Update src/streamlit_app.py
src/streamlit_app.py  (+20 -52)  CHANGED
@@ -1,8 +1,6 @@
 import os
-#
-# This
-# which often causes PermissionError in sandboxed environments like Hugging Face Spaces.
-os.environ["STREAMLIT_SERVER_BROWSER_GATHER_USAGE_STATS"] = "false"
+# Removed: os.environ["STREAMLIT_SERVER_BROWSER_GATHER_USAGE_STATS"] = "false"
+# This is now handled by .streamlit/config.toml for more robust deployment.
 
 # Ensure HOME is set to the current working directory for other potential uses
 os.environ["HOME"] = os.getcwd()
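For context: the replacement comments point to `.streamlit/config.toml` as the new home of this setting. That file is not shown in this commit, but a minimal sketch of what it would need, assuming Streamlit's standard `browser.gatherUsageStats` option, is:

```toml
# .streamlit/config.toml — hypothetical contents, not included in this commit
[browser]
gatherUsageStats = false
```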
@@ -22,7 +20,7 @@ class RepVGGBlock(layers.Layer):
                  groups=1, deploy=False, use_se=False, **kwargs):
         super(RepVGGBlock, self).__init__(**kwargs)
         self.config_initial_in_channels = in_channels
-        self.config_out_channels = out_channels
+        self.config_out_channels = out_channels
         self.config_kernel_size = kernel_size
         self.config_strides_val = stride
         self.config_groups = groups
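For orientation, a hedged construction example for the block above. Only the tail of the `__init__` signature is visible in this hunk; the leading parameter names are inferred from the `self.config_*` assignments and are therefore assumptions:

```python
# Hypothetical usage; the names in_channels/out_channels/kernel_size/stride are
# inferred from the config_* attributes above, not confirmed by the diff.
block = RepVGGBlock(in_channels=64, out_channels=64, kernel_size=3,
                    stride=1, groups=1, deploy=False, use_se=False)
```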
@@ -158,8 +156,7 @@ class RepVGGBlock(layers.Layer):
         if not self.rbr_reparam.built:
             raise Exception(f"CRITICAL ERROR: {self.rbr_reparam.name} not built before set_weights.")
 
-        self.rbr_reparam.set_weights([final_kernel, final_bias])
-        self._deploy_mode_internal = True
+        self.rbr_reparam.set_weights([final_kernel, final_bias]); self._deploy_mode_internal = True
 
     def get_config(self):
         config = super(RepVGGBlock, self).get_config()
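Here `final_kernel` and `final_bias` are computed earlier in the same method (outside this hunk). For readers unfamiliar with RepVGG-style reparameterization, the key ingredient is folding each BatchNorm into its preceding convolution; a minimal NumPy sketch of that folding, assuming a bias-free conv followed by BatchNorm:

```python
import numpy as np

def fuse_conv_bn(kernel, gamma, beta, moving_mean, moving_var, eps=1e-3):
    """Fold BatchNorm statistics into a preceding bias-free conv.

    kernel: (kh, kw, c_in, c_out) Conv2D weights.
    Returns (fused_kernel, fused_bias) so that a single conv matches
    conv -> BN at inference time.
    """
    scale = gamma / np.sqrt(moving_var + eps)           # per-output-channel scale
    fused_kernel = kernel * scale.reshape(1, 1, 1, -1)  # rescale each output filter
    fused_bias = beta - moving_mean * scale             # absorb the BN shift as a bias
    return fused_kernel, fused_bias
```

In RepVGG, the 3x3, 1x1 (zero-padded to 3x3), and identity branches are each folded this way and then summed, which is presumably how `final_kernel`/`final_bias` are obtained before being loaded into `rbr_reparam`.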
@@ -171,12 +168,10 @@ class RepVGGBlock(layers.Layer):
             "groups": self.config_groups,
             "deploy": self._deploy_mode_internal,
             "use_se": self.config_use_se
-        })
-        return config
+        }); return config
 
     @classmethod
-    def from_config(cls, config):
-        return cls(**config)
+    def from_config(cls, config): return cls(**config)
 
 # --- End of RepVGGBlock ---
 
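The `get_config`/`from_config` pair is what allows Keras to round-trip these custom layers through save and load. A hedged loading example (the file name is a placeholder, not taken from this repo):

```python
import tensorflow as tf

# Hypothetical: restore a saved model that uses the custom layers in this file.
model = tf.keras.models.load_model(
    "repvgg_neca_model.h5",  # placeholder path
    custom_objects={"RepVGGBlock": RepVGGBlock, "NECALayer": NECALayer},
)
```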
@@ -184,59 +179,32 @@ class RepVGGBlock(layers.Layer):
 class NECALayer(layers.Layer):
     def __init__(self, channels, gamma=2, b=1, **kwargs):
         super(NECALayer, self).__init__(**kwargs)
-        self.channels = channels
-        self.gamma = gamma
-        self.b = b
-
+        self.channels = channels; self.gamma = gamma; self.b = b
         tf_channels = tf.cast(self.channels, tf.float32)
         k_float = (tf.math.log(tf_channels) / tf.math.log(2.0) + self.b) / self.gamma
         k_int = tf.cast(tf.round(k_float), tf.int32)
-
-        if tf.equal(k_int % 2, 0):
-            self.k_scalar_val = k_int + 1
-        else:
-            self.k_scalar_val = k_int
-        self.k_scalar_val = tf.maximum(1, self.k_scalar_val)  # Ensure kernel size is at least 1
-
-        # Convert to a Python int for Conv1D kernel_size
+        if tf.equal(k_int % 2, 0): self.k_scalar_val = k_int + 1
+        else: self.k_scalar_val = k_int
+        self.k_scalar_val = tf.maximum(1, self.k_scalar_val)
         kernel_size_for_conv1d = (int(self.k_scalar_val.numpy()),)
-
         self.gap = layers.GlobalAveragePooling2D(keepdims=True)
-        self.conv1d = layers.Conv1D(
-            filters=1, kernel_size=kernel_size_for_conv1d, padding='same', use_bias=False,
-            name=self.name + '_eca_conv1d'
-        )
+        self.conv1d = layers.Conv1D(filters=1, kernel_size=kernel_size_for_conv1d, padding='same', use_bias=False, name=self.name + '_eca_conv1d')
         self.sigmoid = layers.Activation('sigmoid')
+        # Reverted build method to original state as per user request
+        # def build(self, input_shape):
+        #     super(NECALayer, self).build(input_shape)
+        #     pass
 
     def call(self, inputs):
-
-        if self.channels != inputs.shape[-1]:
-            raise ValueError(f"Input channels {inputs.shape[-1]} != layer channels {self.channels} for {self.name}")
-
-        x = self.gap(inputs)
-        x = tf.squeeze(x, axis=[1,2])  # Remove spatial dimensions
-        x = tf.expand_dims(x, axis=-1)  # Add a channel dimension for Conv1D
-
-        x = self.conv1d(x)
-        x = tf.squeeze(x, axis=-1)  # Remove the Conv1D output channel dimension
-        attention = self.sigmoid(x)
-
-        # Reshape attention for element-wise multiplication with input
+        if self.channels != inputs.shape[-1]: raise ValueError(f"Input channels {inputs.shape[-1]} != layer channels {self.channels} for {self.name}")
+        x = self.gap(inputs); x = tf.squeeze(x, axis=[1,2]); x = tf.expand_dims(x, axis=-1)
+        x = self.conv1d(x); x = tf.squeeze(x, axis=-1); attention = self.sigmoid(x)
         return inputs * tf.reshape(attention, [-1, 1, 1, self.channels])
-
     def get_config(self):
         config = super(NECALayer, self).get_config()
-        config.update({
-            "channels": self.channels,
-            "gamma": self.gamma,
-            "b": self.b
-        })
-        return config
-
+        config.update({"channels": self.channels, "gamma": self.gamma, "b": self.b}); return config
     @classmethod
-    def from_config(cls, config):
-        return cls(**config)
-
+    def from_config(cls, config): return cls(**config)
 # --- End of NECALayer ---
 
 # --- Streamlit App Configuration ---
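Two quick illustrations of the collapsed NECALayer logic (neither is part of the commit; names other than `NECALayer` are hypothetical). First, the adaptive kernel-size rule from `__init__`, mirrored in plain Python so the resulting odd kernel sizes are easy to inspect:

```python
import math

def eca_kernel_size(channels: int, gamma: int = 2, b: int = 1) -> int:
    """Mirror of NECALayer's rule: k = round((log2(C) + b) / gamma),
    bumped to the next odd number and clamped to at least 1."""
    k = round((math.log2(channels) + b) / gamma)  # rounds halves to even, like tf.round
    if k % 2 == 0:
        k += 1
    return max(1, k)

print(eca_kernel_size(32))   # -> 3
print(eca_kernel_size(512))  # -> 5
```

Second, a minimal smoke test of the layer itself, assuming eager execution (the `.numpy()` call in `__init__` requires it) and a 4D NHWC input:

```python
import tensorflow as tf

x = tf.random.normal([2, 14, 14, 32])   # dummy NHWC feature map, 32 channels
eca = NECALayer(channels=32)
y = eca(x)
assert y.shape == x.shape               # channel attention only rescales values
```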