
Commit e3e47ef
bugfix #1589
probinso committed Jul 20, 2018
1 parent accc625 commit e3e47ef
Showing 1 changed file with 7 additions and 5 deletions.
gensim/models/atmodel.py: 7 additions & 5 deletions
@@ -391,15 +391,17 @@ def inference(self, chunk, author2doc, doc2author, rhot, collect_sstats=False, c
                 doc_no = d
             # Get the IDs and counts of all the words in the current document.
             # TODO: this is duplication of code in LdaModel. Refactor.
+
             if doc and not isinstance(doc[0][0], six.integer_types):
                 # make sure the term IDs are ints, otherwise np will get upset
                 ids = [int(id) for id, _ in doc]
             else:
                 ids = [id for id, _ in doc]
-            cts = np.array([cnt for _, cnt in doc])
+            ids = np.array(ids, dtype=np.integer)
+            cts = np.array([cnt for _, cnt in doc], dtype=np.integer)
 
             # Get all authors in current document, and convert the author names to integer IDs.
-            authors_d = [self.author2id[a] for a in self.doc2author[doc_no]]
+            authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer)
 
             gammad = self.state.gamma[authors_d, :]  # gamma of document d before update.
             tilde_gamma = gammad.copy()  # gamma that will be updated.
@@ -828,9 +830,9 @@ def bound(self, chunk, chunk_doc_idx=None, subsample_ratio=1.0, author2doc=None,
             else:
                 doc_no = d
             # Get all authors in current document, and convert the author names to integer IDs.
-            authors_d = [self.author2id[a] for a in self.doc2author[doc_no]]
-            ids = np.array([id for id, _ in doc])  # Word IDs in doc.
-            cts = np.array([cnt for _, cnt in doc])  # Word counts.
+            authors_d = np.array([self.author2id[a] for a in self.doc2author[doc_no]], dtype=np.integer)
+            ids = np.array([id for id, _ in doc], dtype=np.integer)  # Word IDs in doc.
+            cts = np.array([cnt for _, cnt in doc], dtype=np.integer)  # Word counts.
 
             if d % self.chunksize == 0:
                 logger.debug("bound: at document #%i in chunk", d)
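Why the explicit integer dtype: np.array built from an empty Python list defaults to float64, and NumPy rejects float arrays as indices. Since ids and authors_d are later used for fancy indexing (e.g. self.state.gamma[authors_d, :]), a document with no words or no authors would presumably trigger the IndexError this commit guards against. Below is a minimal standalone sketch of that failure mode, not gensim code; the array shapes are made up, and it uses dtype=int rather than the commit's np.integer, since newer NumPy deprecates abstract types as dtypes and np.integer resolved to the same default integer type at the time.

import numpy as np

doc = []  # an empty bag-of-words document: no (term_id, count) pairs

# Without an explicit dtype, an empty list produces a float64 array.
ids = np.array([term_id for term_id, _ in doc])
print(ids.dtype)  # float64

gamma = np.zeros((5, 10))  # stand-in for self.state.gamma
try:
    gamma[:, ids]  # float arrays are not valid indices, even empty ones
except IndexError as err:
    print(err)  # arrays used as indices must be of integer (or boolean) type

# With an explicit integer dtype, as in this commit, indexing stays valid
# even for empty inputs.
ids = np.array([term_id for term_id, _ in doc], dtype=int)
print(ids.dtype)            # platform default integer, e.g. int64
print(gamma[:, ids].shape)  # (5, 0): selects zero columns, no error

The same cast on cts is not needed for indexing, but it keeps the dtypes of all three arrays consistent across inference() and bound().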
