cleardusk committed
Commit dfa0990 (parent: f647a26)

chore: update driving videos, minor description change

.gitattributes CHANGED
@@ -53,3 +53,5 @@ assets/examples/driving/d14_trim.mp4 filter=lfs diff=lfs merge=lfs -text
 assets/examples/driving/d6_trim.mp4 filter=lfs diff=lfs merge=lfs -text
 assets/examples/driving/d15.mp4 filter=lfs diff=lfs merge=lfs -text
 assets/examples/driving/d16.mp4 filter=lfs diff=lfs merge=lfs -text
+assets/examples/driving/d18.mp4 filter=lfs diff=lfs merge=lfs -text
+assets/examples/driving/d19.mp4 filter=lfs diff=lfs merge=lfs -text
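The two rules added above follow the standard pattern that `git lfs track` writes into `.gitattributes` for each tracked path. A minimal sketch, assuming a hypothetical helper name, of appending such rules from Python instead of via the CLI:

```python
from pathlib import Path

# Hypothetical helper (not part of this commit): append Git LFS tracking rules
# for new driving videos. In practice `git lfs track <path>` writes these lines;
# this sketch only mirrors the rule format shown in the diff above.
LFS_RULE = "{path} filter=lfs diff=lfs merge=lfs -text"

def track_with_lfs(repo_root: str, rel_paths: list[str]) -> None:
    gitattributes = Path(repo_root) / ".gitattributes"
    existing = gitattributes.read_text().splitlines() if gitattributes.exists() else []
    with gitattributes.open("a") as f:
        for rel_path in rel_paths:
            rule = LFS_RULE.format(path=rel_path)
            if rule not in existing:  # skip rules that are already present
                f.write(rule + "\n")

track_with_lfs(".", ["assets/examples/driving/d18.mp4", "assets/examples/driving/d19.mp4"])
```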
app.py CHANGED
@@ -64,8 +64,8 @@ data_examples = [
     [osp.join(example_portrait_dir, "s9.jpg"), osp.join(example_video_dir, "d0.mp4"), True, True, True, True],
     [osp.join(example_portrait_dir, "s6.jpg"), osp.join(example_video_dir, "d0.mp4"), True, True, True, True],
     [osp.join(example_portrait_dir, "s10.jpg"), osp.join(example_video_dir, "d0.mp4"), True, True, True, True],
-    [osp.join(example_portrait_dir, "s5.jpg"), osp.join(example_video_dir, "d15.mp4"), True, True, True, True],
-    [osp.join(example_portrait_dir, "s7.jpg"), osp.join(example_video_dir, "d16.mp4"), True, True, True, True],
+    [osp.join(example_portrait_dir, "s5.jpg"), osp.join(example_video_dir, "d18.mp4"), True, True, True, True],
+    [osp.join(example_portrait_dir, "s7.jpg"), osp.join(example_video_dir, "d19.mp4"), True, True, True, True],
 ]
 #################### interface logic ####################

@@ -101,8 +101,8 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
     gr.Examples(
         examples=[
             [osp.join(example_video_dir, "d0.mp4")],
-            [osp.join(example_video_dir, "d15.mp4")],
-            [osp.join(example_video_dir, "d16.mp4")],
+            [osp.join(example_video_dir, "d18.mp4")],
+            [osp.join(example_video_dir, "d19.mp4")],
             [osp.join(example_video_dir, "d14_trim.mp4")],
             [osp.join(example_video_dir, "d6_trim.mp4")],
         ],
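For context, each row of `data_examples` fills the demo's inputs in order (source portrait, driving video, then four boolean flags), and `gr.Examples` renders the rows as clickable presets. A minimal sketch of that wiring, with hypothetical component names and assumed example directories, since the surrounding `app.py` code is not part of this diff:

```python
import os.path as osp
import gradio as gr

example_portrait_dir = "assets/examples/source"  # assumed layout, matching the repo's asset folders
example_video_dir = "assets/examples/driving"

# Each row: [source image, driving video, four boolean options], in the same
# order as the components passed to `inputs=` below.
data_examples = [
    [osp.join(example_portrait_dir, "s5.jpg"), osp.join(example_video_dir, "d18.mp4"), True, True, True, True],
    [osp.join(example_portrait_dir, "s7.jpg"), osp.join(example_video_dir, "d19.mp4"), True, True, True, True],
]

with gr.Blocks() as demo:
    # Hypothetical stand-ins for the demo's real input components.
    image_input = gr.Image(type="filepath")
    video_input = gr.Video()
    flags = [gr.Checkbox(value=True) for _ in range(4)]
    gr.Examples(examples=data_examples, inputs=[image_input, video_input, *flags])
```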
assets/examples/driving/d18.mp4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1dc94c1fec7ef7dc831c8a49f0e1788ae568812cb68e62f6875d9070f573d02a
+size 187263
assets/examples/driving/d19.mp4 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3047ba66296d96b8a4584e412e61493d7bc0fa5149c77b130e7feea375e698bd
+size 232859
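The two `ADDED` files are Git LFS pointer files rather than the videos themselves: a `version` line, the SHA-256 (`oid`) of the real object, and its `size` in bytes. A small sketch, not part of the repository, for checking a downloaded video against such a pointer:

```python
import hashlib
from pathlib import Path

def verify_lfs_object(pointer_path: str, object_path: str) -> bool:
    """Return True if a local file matches the oid/size recorded in a Git LFS pointer."""
    fields = dict(
        line.split(" ", 1)
        for line in Path(pointer_path).read_text().splitlines()
        if " " in line
    )
    expected_oid = fields["oid"].strip().removeprefix("sha256:")
    expected_size = int(fields["size"])
    data = Path(object_path).read_bytes()
    return len(data) == expected_size and hashlib.sha256(data).hexdigest() == expected_oid

# Example (hypothetical local file name):
# verify_lfs_object("assets/examples/driving/d18.mp4", "downloads/d18.mp4")
```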
assets/gradio_description_upload.md CHANGED
@@ -1,2 +1,2 @@
 ## 🤗 This is the official gradio demo for **LivePortrait**.
-<div style="font-size: 1.2em;">Please upload or use webcam to get a <strong>Source Portrait</strong> and upload a <strong>Driving Video</strong> with 1:1 aspect ratio.</div>
+<div style="font-size: 1.2em;">Please upload or use a webcam to get a <strong>Source Portrait</strong> (any aspect ratio) and upload a <strong>Driving Video</strong> (1:1 aspect ratio).</div>
src/gradio_pipeline.py CHANGED
@@ -62,7 +62,7 @@ class GradioPipeline(LivePortraitPipeline):
         f_s_user, x_s_user, source_lmk_user, crop_M_c2o, mask_ori, img_rgb = \
             self.prepare_retargeting(input_image, flag_do_crop)

-        if input_eye_ratio is None or input_eye_ratio is None:
+        if input_eye_ratio is None or input_lip_ratio is None:
             raise gr.Error("Invalid ratio input 💥!", duration=5)
         else:
             x_s_user = x_s_user.to("cuda")
@@ -109,4 +109,4 @@ class GradioPipeline(LivePortraitPipeline):
             return f_s_user, x_s_user, source_lmk_user, crop_M_c2o, mask_ori, img_rgb
         else:
             # when press the clear button, go here
-            raise gr.Error("The retargeting input hasn't been prepared yet 💥!", duration=5)
+            raise gr.Error("Please upload a source portrait as the retargeting input 🤗🤗🤗", duration=5)
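The first hunk above fixes a copy-paste bug: the old guard tested `input_eye_ratio` twice, so a missing lip ratio slipped past validation; the new guard checks both inputs before anything is moved to the GPU. A standalone sketch of the corrected pattern, using a plain `ValueError` in place of `gr.Error` so it runs outside a Gradio app:

```python
def validate_ratios(input_eye_ratio, input_lip_ratio):
    # Mirror of the fixed guard: reject the request if either retargeting
    # ratio is missing, instead of checking the eye ratio twice.
    if input_eye_ratio is None or input_lip_ratio is None:
        raise ValueError("Invalid ratio input 💥!")
    return input_eye_ratio, input_lip_ratio

validate_ratios(0.38, 0.31)        # ok
# validate_ratios(0.38, None)      # raises: lip ratio missing
```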