wenkai committed
Commit dd9c8e6
1 Parent(s): 97b5d9b

Update app.py

Files changed (1)
  1. app.py +8 -10
app.py CHANGED

@@ -7,7 +7,7 @@ from lavis.models.protein_models.protein_function_opt import Blip2ProteinMistral
 from lavis.models.base_model import FAPMConfig
 import spaces
 import gradio as gr
-
+from esm_scripts.extract import run_demo
 
 # Load the model
 model = Blip2ProteinMistral(config=FAPMConfig(), esm_size='3b')
@@ -18,16 +18,14 @@ model.to('cuda')
 @spaces.GPU
 def generate_caption(protein, prompt):
     # Process the image and the prompt
-    print(f"system path: {os.getcwd()}")
-    with open('/home/user/app/example.fasta', 'w') as f:
-        f.write('>{}\n'.format("protein_name"))
-        f.write('{}\n'.format(protein.strip()))
-    print(f"fasta prepared")
-    os.system("python esm_scripts/extract.py esm2_t36_3B_UR50D /home/user/app/example.fasta /home/user/app --repr_layers 36 --truncation_seq_length 1024 --include per_tok")
-    print(f"protein pt file prepared")
-    esm_emb = torch.load("/home/user/app/protein_name.pt")['representations'][36]
+    # with open('/home/user/app/example.fasta', 'w') as f:
+    #     f.write('>{}\n'.format("protein_name"))
+    #     f.write('{}\n'.format(protein.strip()))
+    # os.system("python esm_scripts/extract.py esm2_t36_3B_UR50D /home/user/app/example.fasta /home/user/app --repr_layers 36 --truncation_seq_length 1024 --include per_tok")
+    esm_emb = run_demo(protein_name='protein_name', protein_seq=protein, model_location='esm2_t36_3B_UR50D',
+                       include='per_tok', repr_layers=36, truncation_seq_length=1024)
     esm_emb = F.pad(esm_emb.t(), (0, 1024 - len(esm_emb))).t().to('cuda')
-    samples = {'name': ['test_protein'],
+    samples = {'name': ['protein_name'],
               'image': torch.unsqueeze(esm_emb, dim=0),
               'text_input': ['none'],
               'prompt': [prompt]}
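
For context, the change replaces the os.system call to esm_scripts/extract.py (which wrote a FASTA file and reloaded protein_name.pt from disk) with an in-process call to run_demo. The body of run_demo is not part of this commit; the sketch below is a hypothetical reconstruction based only on the keyword arguments visible at the call site and on the public fair-esm API (esm.pretrained.load_model_and_alphabet, get_batch_converter), so the real esm_scripts/extract.py implementation may differ.

# Hypothetical sketch of run_demo, inferred from the call site in this diff.
# Assumes the fair-esm package; the actual esm_scripts/extract.py may differ.
import torch
import esm


def run_demo(protein_name, protein_seq, model_location='esm2_t36_3B_UR50D',
             include='per_tok', repr_layers=36, truncation_seq_length=1024):
    # 'include' is accepted only to mirror the call site; this sketch always
    # returns the per-token representation.
    # Load the ESM-2 checkpoint and its alphabet by name (e.g. esm2_t36_3B_UR50D).
    model, alphabet = esm.pretrained.load_model_and_alphabet(model_location)
    model.eval()
    batch_converter = alphabet.get_batch_converter(truncation_seq_length)

    # Tokenize the single (name, sequence) pair; labels and strings are unused here.
    _, _, tokens = batch_converter([(protein_name, protein_seq.strip())])

    with torch.no_grad():
        out = model(tokens, repr_layers=[repr_layers])

    # Per-token representation with the BOS/EOS positions stripped, mirroring the
    # 'per_tok' output that extract.py previously saved to protein_name.pt.
    seq_len = min(len(protein_seq.strip()), truncation_seq_length)
    return out['representations'][repr_layers][0, 1:seq_len + 1].cpu()

Either way, generate_caption then pads the returned per-token embedding to 1024 positions with F.pad and wraps it in the samples dict, as shown in the diff above.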