AndyRaoTHU committed on
Commit 25f0f68 · 1 Parent(s): 5a8168e
Files changed (1):
  1. app.py +75 -58
app.py CHANGED
@@ -193,70 +193,87 @@ class Handler:
         # print("Shapes:", img.shape, basevq_rec.shape, vqgan_rec.shape, revq_rec.shape)
         # return img, basevq_rec, vqgan_rec, revq_rec
         return basevq_rec, vqgan_rec, optvq_rec
-
+
 def draw_process(x, y, std):
     img = (np.random.rand(256, 256, 3) * 255).astype(np.uint8)
     return img, img
 
-if __name__ == "__main__":
-    # create the model handler
-    # handler = Handler(device=device)
-
-    print("Creating Gradio interface...")
-
-    demo2 = gr.Interface(
-        fn=draw_process,
-        inputs=[
-            gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1),
-            gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1),
-            gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1)
-        ],
-        outputs=[
-            gr.Image(label="NN", type="numpy"),
-            gr.Image(label="OptVQ", type="numpy")
-        ],
-        title="Demo 2: 2D Matching Visualization",
-        description="Visualize nearest neighbor vs. optimal transport matching for synthetic 2D data."
-    )
-
-    # Merge the two interfaces into a tabbed UI
-    demo = gr.TabbedInterface(
-        interface_list=[demo2],
-        tab_names=["2D Matching"]
-    )
-
-    demo.launch()
-
-    # create the interface
-    # with gr.Blocks() as demo:
-    #     gr.Textbox(value="This demo shows the image reconstruction comparison between ReVQ and other methods. The input image is resized to 256 x 256 and then fed into the models. The output images are the reconstructed images from the latent codes.", label="Demo 1: Image reconstruction results")
-    #     with gr.Row():
-    #         with gr.Column():
-    #             image_input = gr.Image(label="Input data", image_mode="RGB", type="numpy")
-    #             btn_demo1 = gr.Button(value="Run reconstruction")
-    #         image_basevq = gr.Image(label="BaseVQ rec.")
-    #         image_vqgan = gr.Image(label="VQGAN rec.")
-    #         image_revq = gr.Image(label="ReVQ rec.")
-    #     btn_demo1.click(fn=handler.process_image, inputs=[image_input], outputs=[image_basevq, image_vqgan, image_revq])
-
-    #     gr.Textbox(value="This demo shows the 2D visualizations of nearest neighbor and optimal transport (OptVQ) methods. The data points are randomly generated from a normal distribution, and the matching results are shown as arrows with different colors.", label="Demo 2: 2D visualizations of matching results")
-    #     gr.Markdown("### Demo 2: 2D visualizations of matching results\n"
-    #                 "This demo shows the 2D visualizations of nearest neighbor and optimal transport (OptVQ) methods. "
-    #                 "The data points are randomly generated from a normal distribution, and the matching results are shown as arrows with different colors.")
-    #     with gr.Row():
-    #         with gr.Column():
-    #             input_x = gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1)
-    #             input_y = gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1)
-    #             input_std = gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1)
-    #             btn_demo2 = gr.Button(value="Run 2D example")
-    #         output_nn = gr.Image(label="NN", interactive=False, type="numpy")
-    #         output_optvq = gr.Image(label="OptVQ", interactive=False, type="numpy")
-
-    #     # set the function
-    #     input_x.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
-    #     input_y.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
-    #     input_std.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
-    #     btn_demo2.click(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
-    #     btn_demo2.click(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
-
-    # demo.launch()
+demo2 = gr.Interface(
+    fn=draw_process,
+    inputs=[
+        gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1),
+        gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1),
+        gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1)
+    ],
+    outputs=[
+        gr.Image(label="NN", type="numpy"),
+        gr.Image(label="OptVQ", type="numpy")
+    ],
+    title="Demo 2: 2D Matching Visualization",
+    description="Visualize nearest neighbor vs. optimal transport matching for synthetic 2D data."
+)
+
+demo2.launch()
+
+# if __name__ == "__main__":
+#     # create the model handler
+#     # handler = Handler(device=device)
+
+#     print("Creating Gradio interface...")
+
+#     demo2 = gr.Interface(
+#         fn=draw_process,
+#         inputs=[
+#             gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1),
+#             gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1),
+#             gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1)
+#         ],
+#         outputs=[
+#             gr.Image(label="NN", type="numpy"),
+#             gr.Image(label="OptVQ", type="numpy")
+#         ],
+#         title="Demo 2: 2D Matching Visualization",
+#         description="Visualize nearest neighbor vs. optimal transport matching for synthetic 2D data."
+#     )
+
+#     # Merge the two interfaces into a tabbed UI
+#     demo = gr.TabbedInterface(
+#         interface_list=[demo2],
+#         tab_names=["2D Matching"]
+#     )
+
+#     demo.launch()
+
+#     # create the interface
+#     # with gr.Blocks() as demo:
+#     #     gr.Textbox(value="This demo shows the image reconstruction comparison between ReVQ and other methods. The input image is resized to 256 x 256 and then fed into the models. The output images are the reconstructed images from the latent codes.", label="Demo 1: Image reconstruction results")
+#     #     with gr.Row():
+#     #         with gr.Column():
+#     #             image_input = gr.Image(label="Input data", image_mode="RGB", type="numpy")
+#     #             btn_demo1 = gr.Button(value="Run reconstruction")
+#     #         image_basevq = gr.Image(label="BaseVQ rec.")
+#     #         image_vqgan = gr.Image(label="VQGAN rec.")
+#     #         image_revq = gr.Image(label="ReVQ rec.")
+#     #     btn_demo1.click(fn=handler.process_image, inputs=[image_input], outputs=[image_basevq, image_vqgan, image_revq])
+
+#     #     gr.Textbox(value="This demo shows the 2D visualizations of nearest neighbor and optimal transport (OptVQ) methods. The data points are randomly generated from a normal distribution, and the matching results are shown as arrows with different colors.", label="Demo 2: 2D visualizations of matching results")
+#     #     gr.Markdown("### Demo 2: 2D visualizations of matching results\n"
+#     #                 "This demo shows the 2D visualizations of nearest neighbor and optimal transport (OptVQ) methods. "
+#     #                 "The data points are randomly generated from a normal distribution, and the matching results are shown as arrows with different colors.")
+#     #     with gr.Row():
+#     #         with gr.Column():
+#     #             input_x = gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1)
+#     #             input_y = gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1)
+#     #             input_std = gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1)
+#     #             btn_demo2 = gr.Button(value="Run 2D example")
+#     #         output_nn = gr.Image(label="NN", interactive=False, type="numpy")
+#     #         output_optvq = gr.Image(label="OptVQ", interactive=False, type="numpy")
+
+#     #     # set the function
+#     #     input_x.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
+#     #     input_y.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
+#     #     input_std.change(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
+#     #     btn_demo2.click(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
+#     #     btn_demo2.click(fn=draw_process, inputs=[input_x, input_y, input_std], outputs=[output_nn, output_optvq])
+
+#     # demo.launch()
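Note: this commit launches Demo 2 directly and drops the tabbed layout. Below is a minimal sketch, not part of the commit, of how the removed gr.TabbedInterface wiring could be restored once the Handler-based reconstruction demo is re-enabled. The build_demo helper and its handler argument are hypothetical names introduced here for illustration; handler.process_image is the method referenced only in the commented-out Demo 1 block above.

# Sketch only: standalone version of the tabbed layout, assuming gradio and numpy.
import gradio as gr
import numpy as np

def draw_process(x, y, std):
    # Same placeholder as in app.py: returns two random 256x256 images.
    img = (np.random.rand(256, 256, 3) * 255).astype(np.uint8)
    return img, img

def build_demo(handler=None):
    # `build_demo` and `handler` are hypothetical helpers, not part of this commit.
    demo2 = gr.Interface(
        fn=draw_process,
        inputs=[
            gr.Slider(label="x", value=0, minimum=-10, maximum=10, step=0.1),
            gr.Slider(label="y", value=0, minimum=-10, maximum=10, step=0.1),
            gr.Slider(label="std", value=1, minimum=0, maximum=5, step=0.1),
        ],
        outputs=[
            gr.Image(label="NN", type="numpy"),
            gr.Image(label="OptVQ", type="numpy"),
        ],
        title="Demo 2: 2D Matching Visualization",
        description="Visualize nearest neighbor vs. optimal transport matching for synthetic 2D data.",
    )
    interfaces, names = [demo2], ["2D Matching"]
    if handler is not None:
        # Reconstruction demo from the commented-out Demo 1 block, wrapped as an Interface.
        demo1 = gr.Interface(
            fn=handler.process_image,
            inputs=gr.Image(label="Input data", image_mode="RGB", type="numpy"),
            outputs=[
                gr.Image(label="BaseVQ rec."),
                gr.Image(label="VQGAN rec."),
                gr.Image(label="ReVQ rec."),
            ],
            title="Demo 1: Image Reconstruction",
        )
        interfaces.insert(0, demo1)
        names.insert(0, "Reconstruction")
    # Merge the interfaces into a tabbed UI, as the removed code did.
    return gr.TabbedInterface(interface_list=interfaces, tab_names=names)

if __name__ == "__main__":
    # Pass a real Handler instance here once it is re-enabled; with handler=None
    # this shows only the 2D matching tab, roughly matching what this commit ships.
    build_demo(handler=None).launch()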