jbilcke-hf (HF staff) committed
Commit 3827e0c
1 Parent(s): 1c1a4ec

we are good

Files changed (2)
  1. src/app/main.tsx +13 -13
  2. src/app/store.ts +2 -5
src/app/main.tsx CHANGED
@@ -182,7 +182,6 @@ export function Main() {
   if (!clap) { throw new Error(`failed to edit the entities`) }
 
   console.log(`handleSubmit(): received a clap with entities = `, clap)
-  setCurrentClap(clap)
   setAssetGenerationStatus("finished")
   console.log("---------------- GENERATED ENTITIES ----------------")
   console.table(clap.entities, [
@@ -211,7 +210,6 @@ export function Main() {
   if (!clap) { throw new Error(`failed to edit the sound`) }
 
   console.log(`handleSubmit(): received a clap with sound = `, clap)
-  setCurrentClap(clap)
   setSoundGenerationStatus("finished")
   console.log("---------------- GENERATED SOUND ----------------")
   console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.SOUND), [
@@ -239,7 +237,6 @@ export function Main() {
   if (!clap) { throw new Error(`failed to edit the music`) }
 
   console.log(`handleSubmit(): received a clap with music = `, clap)
-  setCurrentClap(clap)
   setMusicGenerationStatus("finished")
   console.log("---------------- GENERATED MUSIC ----------------")
   console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.MUSIC), [
@@ -270,7 +267,6 @@ export function Main() {
 
   // const fusion =
   console.log(`handleSubmit(): received a clap with images = `, clap)
-  setCurrentClap(clap)
   setImageGenerationStatus("finished")
   console.log("---------------- GENERATED STORYBOARDS ----------------")
   clap.segments
@@ -309,7 +305,6 @@ export function Main() {
   if (!clap) { throw new Error(`failed to edit the videos`) }
 
   console.log(`handleSubmit(): received a clap with videos = `, clap)
-  setCurrentClap(clap)
   setVideoGenerationStatus("finished")
   console.log("---------------- GENERATED VIDEOS ----------------")
   console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.VIDEO), [
@@ -343,7 +338,6 @@ export function Main() {
   if (!clap) { throw new Error(`failed to edit the dialogues`) }
 
   console.log(`handleSubmit(): received a clap with dialogues = `, clap)
-  setCurrentClap(clap)
   setVoiceGenerationStatus("finished")
   console.log("---------------- GENERATED DIALOGUES ----------------")
   console.table(clap.segments.filter(s => s.category === ClapSegmentCategory.DIALOGUE), [
@@ -371,9 +365,9 @@ export function Main() {
 
   setCurrentVideo(assetUrl)
 
-  if (assetUrl.length < 128) { throw new Error(`handleSubmit(): the generated video is too small, so we failed`) }
+  if (assetUrl.length < 128) { throw new Error(`generateFinalVideo(): the generated video is too small, so we failed`) }
 
-  console.log(`handleSubmit(): received a video: ${assetUrl.slice(0, 120)}...`)
+  console.log(`generateFinalVideo(): received a video: ${assetUrl.slice(0, 120)}...`)
   setFinalGenerationStatus("finished")
   return assetUrl
 } catch (err) {
@@ -395,6 +389,8 @@ export function Main() {
   try {
     let clap = await generateStory()
 
+    setCurrentClap(clap)
+
     const tasks = [
       generateMusic(clap),
       generateStoryboardsThenVideos(clap)
@@ -409,6 +405,7 @@ export function Main() {
       overwriteMeta: false,
       inlineReplace: true,
     })
+    setCurrentClap(clap)
   }
 
   /*
@@ -443,6 +440,7 @@ export function Main() {
 
 
   console.log("final clap: ", clap)
+  setCurrentClap(clap)
   await generateFinalVideo(clap)
 
   setStatus("finished")
@@ -473,7 +471,8 @@ export function Main() {
 
   try {
     let clap = await importStory(fileData)
-
+
+    console.log("loadClap(): clap = ", clap)
     const claps = await Promise.all([
       generateMusic(clap),
       generateVideos(clap)
@@ -488,6 +487,7 @@ export function Main() {
     })
   }
 
+  setCurrentClap(clap)
   await generateFinalVideo(clap)
 
   setStatus("finished")
@@ -789,8 +789,8 @@ export function Main() {
       justify-between items-center
       space-x-3">
 
-      {/*
-      <Button
+
+      {canSeeBetaFeatures && <Button
        onClick={openFilePicker}
        disabled={isBusy}
        // variant="ghost"
@@ -804,8 +804,8 @@ export function Main() {
      >
        <span className="hidden xl:inline mr-1">Load</span>
        <span className="inline xl:hidden mr-1">Load</span>
-     </Button>
-     */}
+     </Button>}
+
 
 
      {canSeeBetaFeatures ?
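
Taken together, the main.tsx changes move store updates out of the individual generate* helpers: setCurrentClap(clap) is no longer called after entities, sound, music, storyboards, videos, or dialogues are produced, and is instead called by the orchestrating handlers, once right after the story is generated, once after the generated segments are merged back, and once before the final render. The error message and log in the final-video step are also relabeled from handleSubmit() to generateFinalVideo(), and the Load button is now rendered only when canSeeBetaFeatures is true instead of staying commented out. A minimal sketch of the resulting orchestration, assuming the surrounding handler shape and error branch (neither is fully visible in this diff), with helpers and setters coming from the component scope and the zustand store:

    // sketch only: generateStory, generateMusic, generateStoryboardsThenVideos,
    // generateFinalVideo, setCurrentClap and setStatus come from the component/store
    const handleSubmit = async () => {
      try {
        let clap = await generateStory()

        // the clap is published to the store once, at the orchestration level,
        // instead of inside each generate* helper
        setCurrentClap(clap)

        await Promise.all([
          generateMusic(clap),
          generateStoryboardsThenVideos(clap),
        ])

        console.log("final clap: ", clap)
        setCurrentClap(clap)

        await generateFinalVideo(clap)
        setStatus("finished")
      } catch (err) {
        // hypothetical error handling; the actual catch block is outside this diff
        setStatus("error")
      }
    }
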
src/app/store.ts CHANGED
@@ -1,6 +1,6 @@
 "use client"
 
-import { ClapProject, parseClap, serializeClap, ClapMediaOrientation } from "@aitube/clap"
+import { ClapProject, parseClap, serializeClap, ClapMediaOrientation, parseMediaOrientation } from "@aitube/clap"
 import { create } from "zustand"
 
 import { GenerationStage, GlobalStatus, TaskStatus } from "@/types"
@@ -288,10 +288,7 @@ export const useStore = create<{
     storyPrompt
   )
 
-  // TODO: parseVideoOrientation should be put inside @aitube/clap (in the utils)
-  // const orientation = parseVideoOrientation(currentClap.meta.orientation)
-  // let's use the UI settings for now
-  const { orientation } = get()
+  const orientation = parseMediaOrientation(currentClap.meta.orientation)
 
   currentClap.meta.height = orientation === ClapMediaOrientation.LANDSCAPE ? RESOLUTION_SHORT : RESOLUTION_LONG
   currentClap.meta.width = orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG
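
The store.ts change follows the same cleanup: the orientation used to size the clap is now read from the generated clap's own metadata through parseMediaOrientation (newly imported from @aitube/clap), and the earlier TODO that fell back to the orientation kept in the UI state is gone. A compact sketch of that sizing logic, where currentClap is the clap object held by the store and the resolution constants are illustrative (the real RESOLUTION_SHORT / RESOLUTION_LONG values are defined elsewhere in store.ts):

    import { ClapMediaOrientation, parseMediaOrientation } from "@aitube/clap"

    // illustrative values only; the actual constants live in store.ts
    const RESOLUTION_SHORT = 512
    const RESOLUTION_LONG = 1024

    // read the orientation recorded in the clap's metadata
    const orientation = parseMediaOrientation(currentClap.meta.orientation)

    // landscape is wide (long width, short height); portrait is the reverse
    currentClap.meta.height = orientation === ClapMediaOrientation.LANDSCAPE ? RESOLUTION_SHORT : RESOLUTION_LONG
    currentClap.meta.width = orientation === ClapMediaOrientation.PORTRAIT ? RESOLUTION_SHORT : RESOLUTION_LONG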