aksell committed on
Commit
4ff156e
1 Parent(s): 2d77790

Explain why get_attention re-parses the pdb

Browse files
Files changed (1) hide show
  1. hexviz/attention.py +6 -0
hexviz/attention.py CHANGED
@@ -125,6 +125,12 @@ def unidirectional_avg_filtered(attention, layer, head, threshold):
125
  unidirectional_avg_for_head.append((avg, i, j))
126
  return unidirectional_avg_for_head
127
 
 
 
 
 
 
 
128
  @st.cache
129
  def get_attention_pairs(pdb_str: str, layer: int, head: int, chain_ids: str | None ,threshold: int = 0.2, model_type: ModelType = ModelType.TAPE_BERT, top_n: int = 2):
130
  structure = PDBParser().get_structure("pdb", StringIO(pdb_str))
 
125
  unidirectional_avg_for_head.append((avg, i, j))
126
  return unidirectional_avg_for_head
127
 
128
+
129
+ # Passing the pdb_str here is a workaround for streamlit caching
130
+ # where I need the input to be hashable and not changing
131
+ # The ideal would be to pass in the structure directly, not parsing
132
+ # this twice. If streamlit is upgraded to past 0.17 this can be
133
+ # fixed.
134
  @st.cache
135
  def get_attention_pairs(pdb_str: str, layer: int, head: int, chain_ids: str | None ,threshold: int = 0.2, model_type: ModelType = ModelType.TAPE_BERT, top_n: int = 2):
136
  structure = PDBParser().get_structure("pdb", StringIO(pdb_str))