Add 1D LUTs, UI update
- LUT/gist_heat.cube +258 -0
- app.py +65 -35
- trellis/pipelines/base.py +8 -5
- utils/image_utils.py +50 -64
LUT/gist_heat.cube
ADDED
@@ -0,0 +1,258 @@
+TITLE "Colormap gist_heat"
+LUT_1D_SIZE 256
+0.0000000 0.0000000 0.0000000
+0.0058824 0.0000000 0.0000000
+0.0117647 0.0000000 0.0000000
+0.0176471 0.0000000 0.0000000
+0.0235294 0.0000000 0.0000000
+0.0294118 0.0000000 0.0000000
+0.0352941 0.0000000 0.0000000
+0.0411765 0.0000000 0.0000000
+0.0470588 0.0000000 0.0000000
+0.0529412 0.0000000 0.0000000
+0.0588235 0.0000000 0.0000000
+0.0647059 0.0000000 0.0000000
+0.0705882 0.0000000 0.0000000
+0.0764706 0.0000000 0.0000000
+0.0823529 0.0000000 0.0000000
+0.0882353 0.0000000 0.0000000
+0.0941176 0.0000000 0.0000000
+0.1000000 0.0000000 0.0000000
+0.1058824 0.0000000 0.0000000
+0.1117647 0.0000000 0.0000000
+0.1176471 0.0000000 0.0000000
+0.1235294 0.0000000 0.0000000
+0.1294118 0.0000000 0.0000000
+0.1352941 0.0000000 0.0000000
+0.1411765 0.0000000 0.0000000
+0.1470588 0.0000000 0.0000000
+0.1529412 0.0000000 0.0000000
+0.1588235 0.0000000 0.0000000
+0.1647059 0.0000000 0.0000000
+0.1705882 0.0000000 0.0000000
+0.1764706 0.0000000 0.0000000
+0.1823529 0.0000000 0.0000000
+0.1882353 0.0000000 0.0000000
+0.1941176 0.0000000 0.0000000
+0.2000000 0.0000000 0.0000000
+0.2058824 0.0000000 0.0000000
+0.2117647 0.0000000 0.0000000
+0.2176471 0.0000000 0.0000000
+0.2235294 0.0000000 0.0000000
+0.2294118 0.0000000 0.0000000
+0.2352941 0.0000000 0.0000000
+0.2411765 0.0000000 0.0000000
+0.2470588 0.0000000 0.0000000
+0.2529412 0.0000000 0.0000000
+0.2588235 0.0000000 0.0000000
+0.2647059 0.0000000 0.0000000
+0.2705882 0.0000000 0.0000000
+0.2764706 0.0000000 0.0000000
+0.2823529 0.0000000 0.0000000
+0.2882353 0.0000000 0.0000000
+0.2941176 0.0000000 0.0000000
+0.3000000 0.0000000 0.0000000
+0.3058824 0.0000000 0.0000000
+0.3117647 0.0000000 0.0000000
+0.3176471 0.0000000 0.0000000
+0.3235294 0.0000000 0.0000000
+0.3294118 0.0000000 0.0000000
+0.3352941 0.0000000 0.0000000
+0.3411765 0.0000000 0.0000000
+0.3470588 0.0000000 0.0000000
+0.3529412 0.0000000 0.0000000
+0.3588235 0.0000000 0.0000000
+0.3647059 0.0000000 0.0000000
+0.3705882 0.0000000 0.0000000
+0.3764706 0.0000000 0.0000000
+0.3823529 0.0000000 0.0000000
+0.3882353 0.0000000 0.0000000
+0.3941176 0.0000000 0.0000000
+0.4000000 0.0000000 0.0000000
+0.4058824 0.0000000 0.0000000
+0.4117647 0.0000000 0.0000000
+0.4176471 0.0000000 0.0000000
+0.4235294 0.0000000 0.0000000
+0.4294118 0.0000000 0.0000000
+0.4352941 0.0000000 0.0000000
+0.4411765 0.0000000 0.0000000
+0.4470588 0.0000000 0.0000000
+0.4529412 0.0000000 0.0000000
+0.4588235 0.0000000 0.0000000
+0.4647059 0.0000000 0.0000000
+0.4705882 0.0000000 0.0000000
+0.4764706 0.0000000 0.0000000
+0.4823529 0.0000000 0.0000000
+0.4882353 0.0000000 0.0000000
+0.4941176 0.0000000 0.0000000
+0.5000000 0.0000000 0.0000000
+0.5058824 0.0000000 0.0000000
+0.5117647 0.0000000 0.0000000
+0.5176471 0.0000000 0.0000000
+0.5235294 0.0000000 0.0000000
+0.5294118 0.0000000 0.0000000
+0.5352941 0.0000000 0.0000000
+0.5411765 0.0000000 0.0000000
+0.5470588 0.0000000 0.0000000
+0.5529412 0.0000000 0.0000000
+0.5588235 0.0000000 0.0000000
+0.5647059 0.0000000 0.0000000
+0.5705882 0.0000000 0.0000000
+0.5764706 0.0000000 0.0000000
+0.5823529 0.0000000 0.0000000
+0.5882353 0.0000000 0.0000000
+0.5941176 0.0000000 0.0000000
+0.6000000 0.0000000 0.0000000
+0.6058824 0.0000000 0.0000000
+0.6117647 0.0000000 0.0000000
+0.6176471 0.0000000 0.0000000
+0.6235294 0.0000000 0.0000000
+0.6294118 0.0000000 0.0000000
+0.6352941 0.0000000 0.0000000
+0.6411765 0.0000000 0.0000000
+0.6470588 0.0000000 0.0000000
+0.6529412 0.0000000 0.0000000
+0.6588235 0.0000000 0.0000000
+0.6647059 0.0000000 0.0000000
+0.6705882 0.0000000 0.0000000
+0.6764706 0.0000000 0.0000000
+0.6823529 0.0000000 0.0000000
+0.6882353 0.0000000 0.0000000
+0.6941176 0.0000000 0.0000000
+0.7000000 0.0000000 0.0000000
+0.7058824 0.0000000 0.0000000
+0.7117647 0.0000000 0.0000000
+0.7176471 0.0000000 0.0000000
+0.7235294 0.0000000 0.0000000
+0.7294118 0.0000000 0.0000000
+0.7352941 0.0000000 0.0000000
+0.7411765 0.0000000 0.0000000
+0.7470588 0.0000000 0.0000000
+0.7529412 0.0039216 0.0000000
+0.7588235 0.0117647 0.0000000
+0.7647059 0.0196078 0.0000000
+0.7705882 0.0274510 0.0000000
+0.7764706 0.0352941 0.0000000
+0.7823529 0.0431373 0.0000000
+0.7882353 0.0509804 0.0000000
+0.7941176 0.0588235 0.0000000
+0.8000000 0.0666667 0.0000000
+0.8058824 0.0745098 0.0000000
+0.8117647 0.0823529 0.0000000
+0.8176471 0.0901961 0.0000000
+0.8235294 0.0980392 0.0000000
+0.8294118 0.1058824 0.0000000
+0.8352941 0.1137255 0.0000000
+0.8411765 0.1215686 0.0000000
+0.8470588 0.1294118 0.0000000
+0.8529412 0.1372549 0.0000000
+0.8588235 0.1450980 0.0000000
+0.8647059 0.1529412 0.0000000
+0.8705882 0.1607843 0.0000000
+0.8764706 0.1686275 0.0000000
+0.8823529 0.1764706 0.0000000
+0.8882353 0.1843137 0.0000000
+0.8941176 0.1921569 0.0000000
+0.9000000 0.2000000 0.0000000
+0.9058824 0.2078431 0.0000000
+0.9117647 0.2156863 0.0000000
+0.9176471 0.2235294 0.0000000
+0.9235294 0.2313725 0.0000000
+0.9294118 0.2392157 0.0000000
+0.9352941 0.2470588 0.0000000
+0.9411765 0.2549020 0.0000000
+0.9470588 0.2627451 0.0000000
+0.9529412 0.2705882 0.0000000
+0.9588235 0.2784314 0.0000000
+0.9647059 0.2862745 0.0000000
+0.9705882 0.2941176 0.0000000
+0.9764706 0.3019608 0.0000000
+0.9823529 0.3098039 0.0000000
+0.9882353 0.3176471 0.0000000
+0.9941176 0.3254902 0.0000000
+1.0000000 0.3333333 0.0000000
+1.0000000 0.3411765 0.0000000
+1.0000000 0.3490196 0.0000000
+1.0000000 0.3568627 0.0000000
+1.0000000 0.3647059 0.0000000
+1.0000000 0.3725490 0.0000000
+1.0000000 0.3803922 0.0000000
+1.0000000 0.3882353 0.0000000
+1.0000000 0.3960784 0.0000000
+1.0000000 0.4039216 0.0000000
+1.0000000 0.4117647 0.0000000
+1.0000000 0.4196078 0.0000000
+1.0000000 0.4274510 0.0000000
+1.0000000 0.4352941 0.0000000
+1.0000000 0.4431373 0.0000000
+1.0000000 0.4509804 0.0000000
+1.0000000 0.4588235 0.0000000
+1.0000000 0.4666667 0.0000000
+1.0000000 0.4745098 0.0000000
+1.0000000 0.4823529 0.0000000
+1.0000000 0.4901961 0.0000000
+1.0000000 0.4980392 0.0000000
+1.0000000 0.5058824 0.0117647
+1.0000000 0.5137255 0.0274510
+1.0000000 0.5215686 0.0431373
+1.0000000 0.5294118 0.0588235
+1.0000000 0.5372549 0.0745098
+1.0000000 0.5450980 0.0901961
+1.0000000 0.5529412 0.1058824
+1.0000000 0.5607843 0.1215686
+1.0000000 0.5686275 0.1372549
+1.0000000 0.5764706 0.1529412
+1.0000000 0.5843137 0.1686275
+1.0000000 0.5921569 0.1843137
+1.0000000 0.6000000 0.2000000
+1.0000000 0.6078431 0.2156863
+1.0000000 0.6156863 0.2313725
+1.0000000 0.6235294 0.2470588
+1.0000000 0.6313725 0.2627451
+1.0000000 0.6392157 0.2784314
+1.0000000 0.6470588 0.2941176
+1.0000000 0.6549020 0.3098039
+1.0000000 0.6627451 0.3254902
+1.0000000 0.6705882 0.3411765
+1.0000000 0.6784314 0.3568627
+1.0000000 0.6862745 0.3725490
+1.0000000 0.6941176 0.3882353
+1.0000000 0.7019608 0.4039216
+1.0000000 0.7098039 0.4196078
+1.0000000 0.7176471 0.4352941
+1.0000000 0.7254902 0.4509804
+1.0000000 0.7333333 0.4666667
+1.0000000 0.7411765 0.4823529
+1.0000000 0.7490196 0.4980392
+1.0000000 0.7568627 0.5137255
+1.0000000 0.7647059 0.5294118
+1.0000000 0.7725490 0.5450980
+1.0000000 0.7803922 0.5607843
+1.0000000 0.7882353 0.5764706
+1.0000000 0.7960784 0.5921569
+1.0000000 0.8039216 0.6078431
+1.0000000 0.8117647 0.6235294
+1.0000000 0.8196078 0.6392157
+1.0000000 0.8274510 0.6549020
+1.0000000 0.8352941 0.6705882
+1.0000000 0.8431373 0.6862745
+1.0000000 0.8509804 0.7019608
+1.0000000 0.8588235 0.7176471
+1.0000000 0.8666667 0.7333333
+1.0000000 0.8745098 0.7490196
+1.0000000 0.8823529 0.7647059
+1.0000000 0.8901961 0.7803922
+1.0000000 0.8980392 0.7960784
+1.0000000 0.9058824 0.8117647
+1.0000000 0.9137255 0.8274510
+1.0000000 0.9215686 0.8431373
+1.0000000 0.9294118 0.8588235
+1.0000000 0.9372549 0.8745098
+1.0000000 0.9450980 0.8901961
+1.0000000 0.9529412 0.9058824
+1.0000000 0.9607843 0.9215686
+1.0000000 0.9686275 0.9372549
+1.0000000 0.9764706 0.9529412
+1.0000000 0.9843137 0.9686275
+1.0000000 0.9921569 0.9843137
+1.0000000 1.0000000 1.0000000
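The added file is a 1D LUT in .cube format: after the TITLE and LUT_1D_SIZE 256 header lines, each of the 256 rows gives the output R G B for one input level between 0.0 and 1.0 (gist_heat ramps red first, then green, then blue). As a rough illustration of how such a table is consumed, here is a minimal Python sketch; it mirrors, but is not identical to, the apply_1d_lut helper added to utils/image_utils.py later in this commit, and assumes only NumPy and Pillow.

# Minimal sketch (not the app's exact code): apply a 1D .cube LUT like the one
# above by mapping each grayscale level of an image to its RGB row.
import numpy as np
from PIL import Image

def apply_1d_cube(image_path: str, cube_path: str) -> Image.Image:
    rows = []
    with open(cube_path) as f:
        for line in f:
            line = line.strip()
            # Skip comments and header keywords (TITLE, LUT_1D_SIZE, DOMAIN_*)
            if not line or line.startswith(("#", "TITLE", "LUT", "DOMAIN")):
                continue
            rows.append([float(v) for v in line.split()])
    table = np.array(rows)                      # shape (256, 3), values in [0, 1]

    gray = np.asarray(Image.open(image_path).convert("L")) / 255.0
    idx = np.round(gray * (len(table) - 1)).astype(int)
    rgb = (table[idx] * 255).astype(np.uint8)   # per-pixel lookup -> (H, W, 3)
    return Image.fromarray(rgb, mode="RGB")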
app.py
CHANGED
@@ -27,8 +27,6 @@ import logging
 #logging.getLogger("transformers.modeling_utils").setLevel(logging.ERROR)
 import gc

-IS_SHARED_SPACE = constants.IS_SHARED_SPACE
-
 # Import functions from modules
 from utils.file_utils import cleanup_temp_files

@@ -753,20 +751,42 @@ def replace_input_with_sketch_image(sketch_image):
     sketch, is_dict = get_image_from_dict(sketch_image)
     return sketch
 ####################################### DEPTH ESTIMATION #######################################
+
+@spaces.GPU(progress=gr.Progress(track_tqdm=True))
+def load_trellis_model():
+    global TRELLIS_PIPELINE
+    loaded = False
+    if TRELLIS_PIPELINE == None:
+        try:
+            TRELLIS_PIPELINE = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")
+            TRELLIS_PIPELINE.cuda()
+            # Preload with a dummy image to finalize initialization
+            try:
+                TRELLIS_PIPELINE.preprocess_image(Image.fromarray(np.zeros((512, 512, 4), dtype=np.uint8))) # Preload rembg
+            except:
+                pass
+            print("TRELLIS_PIPELINE loaded\n")
+            gr.Info("TRELLIS_PIPELINE loaded")
+            loaded = True
+        except Exception as e:
+            print(f"Error preloading TRELLIS_PIPELINE: {e}")
+            gr.Error(f"Failed to load TRELLIS_PIPELINE: {e}")
+            TRELLIS_PIPELINE = None
+
+
 def load_3d_models(is_open: bool = True) -> bool:
     if is_open:
         gr.Info("Loading 3D models...")
-        global image_processor, depth_model
+        global image_processor, depth_model
         image_processor = DPTImageProcessor.from_pretrained("Intel/dpt-large")
         depth_model = DPTForDepthEstimation.from_pretrained("Intel/dpt-large", ignore_mismatched_sizes=True)
-
-
-
-            TRELLIS_PIPELINE.preprocess_image(Image.fromarray(np.zeros((256, 256, 3), dtype=np.uint8)))
-        except Exception as e:
-            print(f"Error preloading TRELLIS_PIPELINE: {e}")
+        print("DPT models loaded\n")
+        # is_open = load_trellis_model()
+        # if is_open:
         print("3D models loaded")
         gr.Info("3D models loaded.")
+        #else:
+        #    gr.Error("Failed to load TRELLIS_PIPELINE.")
     return gr.update(interactive = is_open)

 def unload_3d_models(is_open: bool = False) -> bool:
@@ -775,12 +795,12 @@ def unload_3d_models(is_open: bool = False) -> bool:
     global image_processor, depth_model, TRELLIS_PIPELINE
     if TRELLIS_PIPELINE:
         TRELLIS_PIPELINE.to("cpu")
-
+        TRELLIS_PIPELINE = None
     if depth_model:
         del image_processor
         del depth_model
-    #torch.cuda.empty_cache()
-    #torch.cuda.ipc_collect()
+    # torch.cuda.empty_cache()
+    # torch.cuda.ipc_collect()
     gc.collect()
     print("3D models unloaded and CUDA memory freed")
     gr.Info("3D models unloaded.")
@@ -928,7 +948,7 @@ def generate_3d_asset_part2(depth_img, image_path, output_name, seed, steps, mod
     depth_img = Image.open(depth_img).convert("RGBA")
     # Preprocess and run the Trellis pipeline with fixed sampler settings
     try:
-        TRELLIS_PIPELINE.cuda()
+        #TRELLIS_PIPELINE.cuda()
         processed_image = TRELLIS_PIPELINE.preprocess_image(resized_image, max_resolution=model_resolution)
         outputs = TRELLIS_PIPELINE.run(
             processed_image,
@@ -1154,19 +1174,12 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
                     color_picker = gr.ColorPicker(label="Pick a color to exclude",value="#505050")
                 with gr.Column():
                     filter_color = gr.Checkbox(label="Filter Excluded Colors from Sampling", value=False,)
+                    fill_hex = gr.Checkbox(label="Fill Hex with color from Image", value=True)
                     exclude_color_button = gr.Button("Exclude Color", elem_id="exlude_color_button", elem_classes="solid")
                     color_display = gr.DataFrame(label="List of Excluded RGBA Colors", headers=["R", "G", "B", "A"], elem_id="excluded_colors", type="array", value=build_dataframe(excluded_color_list), interactive=True, elem_classes="solid centered")
                     selected_row = gr.Number(0, label="Selected Row", visible=False)
-                    delete_button = gr.Button("Delete Row", elem_id="delete_exclusion_button", elem_classes="solid")
-                    fill_hex = gr.Checkbox(label="Fill Hex with color from Image", value=True)
+                    delete_button = gr.Button("Delete Row", elem_id="delete_exclusion_button", elem_classes="solid")
             with gr.Accordion("Image Filters", open = False):
-                with gr.Row():
-                    with gr.Column():
-                        composite_color = gr.ColorPicker(label="Color", value="#ede9ac44")
-                    with gr.Column():
-                        composite_opacity = gr.Slider(label="Opacity %", minimum=0, maximum=100, value=50, interactive=True)
-                with gr.Row():
-                    composite_button = gr.Button("Composite", elem_classes="solid")
                 with gr.Row():
                     with gr.Column():
                         lut_filename = gr.Textbox(
@@ -1179,7 +1192,10 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
                            file_count="single",
                            file_types=[".cube"],
                            type="filepath",
-                           label="LUT cube File"
+                           label="LUT cube File",
+                           height=120)
+                with gr.Row():
+                    apply_lut_button = gr.Button("Apply Filter (LUT)", elem_classes="solid", elem_id="apply_lut_button")
                 with gr.Row():
                     lut_example_image = gr.Image(type="pil", label="Filter (LUT) Example Image", value=constants.default_lut_example_img)
                 with gr.Row():
@@ -1199,18 +1215,20 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
                        examples_per_page = 15,
                    )

-                with gr.Row():
-                    apply_lut_button = gr.Button("Apply Filter (LUT)", elem_classes="solid", elem_id="apply_lut_button")
-
                 lut_file.change(get_filename, inputs=[lut_file], outputs=[lut_filename])
-                lut_filename.change(show_lut, inputs=[lut_filename, lut_example_image], outputs=[lut_example_image])
+                lut_filename.change(show_lut, inputs=[lut_filename, lut_example_image], outputs=[lut_example_image], scroll_to_output=True)
                 apply_lut_button.click(
                     lambda lut_filename, input_image: gr.Warning("Please upload an Input Image to get started.") if input_image is None else apply_lut_to_image_path(lut_filename, input_image)[1],
                     inputs=[lut_filename, input_image],
                     outputs=[input_image],
                     scroll_to_output=True
                 )
-
+            with gr.Accordion("Color Composite", open = False):
+                with gr.Row():
+                    composite_color = gr.ColorPicker(label="Color", value="#ede9ac44")
+                    composite_opacity = gr.Slider(label="Opacity %", minimum=0, maximum=100, value=50, interactive=True)
+                with gr.Row():
+                    composite_button = gr.Button("Composite", elem_classes="solid")
             with gr.Row():
                 with gr.Accordion("Generate AI Image (click here for options)", open = False):
                     with gr.Row():
@@ -1229,7 +1247,7 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
                        lines=2,
                        visible=False
                    )
-                    with gr.Accordion("Choose Style
+                    with gr.Accordion("Choose Image Style*", open=True):
                         lora_gallery = gr.Gallery(
                             [(open_image(image_path), title) for image_path, title in lora_models],
                             label="Styles",
@@ -1455,6 +1473,10 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
        scroll_to_output=True
    )
    generate_input_image.click(
+        fn=unload_3d_models,
+        trigger_mode="always_last",
+        outputs=[generate_3d_asset_button]
+    ).then(
        fn=generate_input_image_click,
        inputs=[input_image,map_options, prompt_textbox, negative_prompt_textbox, model_textbox, randomize_seed, seed_slider, gr.State(False), sketch_image, image_guidance_stength, image_size_ratio],
        outputs=[input_image, seed_slider], scroll_to_output=True
@@ -1512,6 +1534,10 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',

    #use conditioned_image as the input_image for generate_input_image_click
    generate_input_image_from_gallery.click(
+        fn=unload_3d_models,
+        trigger_mode="always_last",
+        outputs=[generate_3d_asset_button]
+    ).then(
        fn=generate_input_image_click,
        inputs=[input_image, map_options, prompt_textbox, negative_prompt_textbox, model_textbox,randomize_seed, seed_slider, gr.State(True), sketch_image , image_guidance_stength, image_size_ratio],
        outputs=[input_image, seed_slider], scroll_to_output=True
@@ -1569,6 +1595,9 @@ with gr.Blocks(css_paths="style_20250314.css", title=title, theme='Surn/beeuty',
    # outputs=[depth_map_output, model_output, model_file], scroll_to_output=True
    # )
    accordian_3d.expand(
+        fn=load_trellis_model,
+        trigger_mode="always_last"
+    ).then(
        fn=load_3d_models,
        trigger_mode="always_last",
        outputs=[generate_3d_asset_button],
@@ -1646,12 +1675,13 @@ if __name__ == "__main__":

    # image_processor = DPTImageProcessor.from_pretrained("Intel/dpt-large")
    # depth_model = DPTForDepthEstimation.from_pretrained("Intel/dpt-large", ignore_mismatched_sizes=True)
-    #
-    #
-    #
-    #
-    #
-    #
+    # if constants.IS_SHARED_SPACE:
+    #     TRELLIS_PIPELINE = TrellisImageTo3DPipeline.from_pretrained("JeffreyXiang/TRELLIS-image-large")
+    #     TRELLIS_PIPELINE.to(device)
+    #     try:
+    #         TRELLIS_PIPELINE.preprocess_image(Image.fromarray(np.zeros((512, 512, 3), dtype=np.uint8))) # Preload rembg
+    #     except:
+    #         pass
    hexaGrid.queue(default_concurrency_limit=1,max_size=12,api_open=False)
    hexaGrid.launch(allowed_paths=["assets","/","./assets","images","./images", "./images/prerendered", 'e:/TMP'], favicon_path="./assets/favicon.ico", max_file_size="10mb")

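The app.py changes all serve one pattern: the GPU-heavy 3D models are loaded lazily when the 3D accordion expands (load_trellis_model, then load_3d_models, chained with .then()) and unloaded before an image-generation click runs, so the ZeroGPU space only holds one heavy pipeline at a time. Below is a minimal, hypothetical sketch of that Gradio event-chaining pattern; the component and handler names are illustrative, not the app's exact code.

# Hypothetical sketch of the load/unload chaining used above.
import gradio as gr

def unload_models():
    # e.g. pipeline.to("cpu"); gc.collect() in the real app
    return gr.update(interactive=False)

def load_models():
    # e.g. reload the DPT / TRELLIS pipelines in the real app
    return gr.update(interactive=True)

with gr.Blocks() as demo:
    generate_btn = gr.Button("Generate Image")
    with gr.Accordion("3D Generation", open=False) as accordion_3d:
        asset_btn = gr.Button("Generate 3D Asset", interactive=False)

    # Free the 3D models first, then run the expensive 2D generation step.
    generate_btn.click(fn=unload_models, outputs=[asset_btn]).then(
        fn=lambda: gr.Info("generating image...")
    )
    # Opening the accordion reloads the models and re-enables the button.
    accordion_3d.expand(fn=load_models, outputs=[asset_btn])

demo.launch()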
trellis/pipelines/base.py
CHANGED
@@ -11,12 +11,15 @@ class Pipeline:
     def __init__(
         self,
         models: dict[str, nn.Module] = None,
+        device: torch.device = torch.device("cpu")
     ):
         if models is None:
-
-
-
-            model.eval()
+            self.models = {}
+        else:
+            self.models = models
+        for model in self.models.values():
+            model.eval()
+        self.to(device)

     @staticmethod
     def from_pretrained(path: str) -> "Pipeline":
@@ -41,7 +44,7 @@
             for k, v in args['models'].items()
         }

-        new_pipeline = Pipeline(_models)
+        new_pipeline = Pipeline(_models) # defaults to cpu
         new_pipeline._pretrained_args = args
         return new_pipeline

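With this change a Pipeline always ends up with a models dict, puts every model in eval mode, and stays on the CPU unless a device is passed, so from_pretrained (which still calls Pipeline(_models)) yields a CPU-resident pipeline that the caller moves to the GPU explicitly, as app.py does with TRELLIS_PIPELINE.cuda(). A small usage sketch under those assumptions; the pretrained path is a placeholder, not a real checkpoint.

# Sketch only: construct on CPU by default, opt in to the GPU when needed.
import torch
from trellis.pipelines.base import Pipeline

pipe = Pipeline.from_pretrained("path/to/a-pretrained-pipeline")  # hypothetical path; models land on CPU
if torch.cuda.is_available():
    pipe.to(torch.device("cuda"))  # move to the GPU right before running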
utils/image_utils.py
CHANGED
@@ -528,24 +528,6 @@ def resize_and_crop_image(image: Image, new_width: int = 512, new_height: int =

 ##################################################### LUTs ############################################################

-class Color1DLUT(ImageFilter.Filter):
-    """Custom filter to apply a 1D LUT to an RGB image."""
-    def __init__(self, table, size):
-        self.table = table
-        self.size = size
-        if size != 256:
-            raise ValueError("Only 1D LUTs with size 256 are supported")
-        # Create a 768-entry LUT (256 for R, G, B) scaled to 0-255
-        lut_r = [int(table[i][0] * 255) for i in range(256)]
-        lut_g = [int(table[i][1] * 255) for i in range(256)]
-        lut_b = [int(table[i][2] * 255) for i in range(256)]
-        self.lut = lut_r + lut_g + lut_b
-
-    def filter(self, image):
-        if image.mode != 'RGB':
-            image = image.convert('RGB')
-        return image.point(self.lut)
-
 def is_3dlut_row(row: List[str]) -> bool:
     """
     Check if one line in the file has exactly 3 numeric values.
@@ -562,25 +544,8 @@ def is_3dlut_row(row: List[str]) -> bool:
     except ValueError:
         return False

-def read_lut(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> Union[ImageFilter.Color3DLUT, Color1DLUT]:
-    """
-    Read a LUT from a .cube file and return a filter object.
-
-    Detects whether the file contains a 1D or 3D LUT based on keywords
-    "LUT_1D_SIZE" or "LUT_3D_SIZE". Initially assumes a 3D LUT if no size
-    keyword is specified.
-
-    Args:
-        path_lut: Path to the LUT file (string or os.PathLike).
-        num_channels: Number of color channels in the LUT (default is 3).
-
-    Returns:
-        ImageFilter.Color3DLUT for 3D LUTs or Color1DLUT for 1D LUTs.
-
-    Raises:
-        FileNotFoundError: If the file does not exist.
-        ValueError: If the LUT data is invalid or size mismatches.
-    """
+def get_lut_type(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> str:
+
     with open(path_lut) as f:
         lines = f.read().splitlines()

@@ -602,28 +567,10 @@ def read_lut(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> Union[
             lut_type = "1D"
         elif is_3dlut_row(parts):
             table.append(tuple(float(val) for val in parts))
+    return lut_type
+

-
-    if lut_type == "3D":
-        if size is None:
-            # Calculate size assuming 3D LUT
-            len_table = len(table)
-            if len_table == 0:
-                raise ValueError("No valid LUT data found")
-            size = round(len_table ** (1 / 3))
-            if size ** 3 != len_table:
-                raise ValueError(f"Number of table entries {len_table} is not a perfect cube")
-        elif len(table) != size ** 3:
-            raise ValueError(f"Expected {size**3} entries for 3D LUT, got {len(table)}")
-        return ImageFilter.Color3DLUT(size, table, channels=num_channels)
-    else: # lut_type == "1D"
-        if size is None:
-            raise ValueError("LUT_1D_SIZE not specified for 1D LUT")
-        if len(table) != size:
-            raise ValueError(f"Expected {size} entries for 1D LUT, got {len(table)}")
-        return Color1DLUT(table, size)
-
-def read_3Dlut(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> ImageFilter.Color3DLUT:
+def read_3d_lut(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> ImageFilter.Color3DLUT:
     """
     Read LUT from a raw file.

@@ -647,7 +594,31 @@ def read_3Dlut(path_lut: Union[str, os.PathLike], num_channels: int = 3) -> Imag
     lut_table = [row2val(row) for row in lut_raw if is_3dlut_row(row.split(" "))]
     return ImageFilter.Color3DLUT(size, lut_table, num_channels)

-def apply_lut(img: Image, lut_path: str = "", lut: ImageFilter.Color3DLUT = None) -> Image:
+def apply_1d_lut(image, lut_file):
+    # Read the 1D LUT
+    with open(lut_file) as f:
+        lines = f.read().splitlines()
+    table = []
+    for line in lines:
+        if not line.startswith(("#", "LUT", "TITLE", "DOMAIN")) and line.strip():
+            values = [float(v) for v in line.split()]
+            table.append(tuple(values))
+
+    # Convert image to grayscale
+    if image.mode != 'L':
+        image = image.convert('L')
+    img_array = np.array(image) / 255.0 # Normalize to [0, 1]
+
+    # Map grayscale values to colors
+    lut_size = len(table)
+    indices = (img_array * (lut_size - 1)).astype(int)
+    colors = np.array(table)[indices]
+
+    # Create RGB image
+    rgb_image = Image.fromarray((colors * 255).astype(np.uint8), mode='RGB')
+    return rgb_image
+
+def apply_3d_lut(img: Image, lut_path: str = "", lut: ImageFilter.Color3DLUT = None) -> Image:
     """
     Apply a LUT to an image and return a PIL Image with the LUT applied.

@@ -667,11 +638,30 @@ def apply_lut(img: Image, lut_path: str = "", lut: ImageFilter.Color3DLUT = None
     if lut is None:
         if lut_path == "":
             raise ValueError("Either lut_path or lut argument must be provided.")
-        lut =
+        lut = read_3d_lut(lut_path)
     return img.filter(lut)

+def apply_lut(image, lut_filename: str) -> Image:
+    """
+    Apply a LUT to an image and return the result.
+    Args:
+        image (str or PIL.Image.Image): The image to apply the LUT to.
+        lut_filename (str): The path to the LUT file.
+    Returns:
+        PIL.Image.Image: The image with the LUT applied.
+    """
+    if isinstance(image, str):
+        image = open_image(image)
+    if lut_filename is not None:
+        if (get_lut_type(lut_filename) == "3D"):
+            lut = read_3d_lut(lut_filename)
+            image = apply_3d_lut(image, lut=lut)
+        else:
+            image = apply_1d_lut(image, lut_filename)
+    return image
+
 def show_lut(lut_filename: str, lut_example_image: Image = default_lut_example_img) -> Image:
-    if lut_filename is not None:
+    if lut_filename is not None:
        try:
            lut_example_image = apply_lut(lut_example_image, lut_filename)
        except Exception as e:
@@ -704,10 +694,6 @@ def apply_1d_lut(image, lut_file):
     rgb_image = Image.fromarray((colors * 255).astype(np.uint8), mode='RGB')
     return rgb_image

-
-
-
-
 def apply_lut_to_image_path(lut_filename: str, image_path: str) -> tuple[Image, str]:
     """
     Apply a LUT to an image and return the result.
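The LUT code is now split by dimensionality: get_lut_type() scans the .cube header, read_3d_lut()/apply_3d_lut() keep the Color3DLUT path, apply_1d_lut() maps a grayscale image through a 256-row table, and apply_lut() dispatches between them. A short usage sketch under those assumptions (the input image path is hypothetical; the LUT file is the one added in this commit):

# Sketch: run the new dispatch end to end with the LUT added in this commit.
from PIL import Image
from utils.image_utils import apply_lut, get_lut_type

print(get_lut_type("LUT/gist_heat.cube"))        # expected: "1D"

img = Image.open("images/example.png")           # hypothetical input image
filtered = apply_lut(img, "LUT/gist_heat.cube")  # 1D path: grayscale levels -> gist_heat colors
filtered.save("example_gist_heat.png")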