Fix metadata update (#267)
* Do not use .loc when assigning new values (chained indexing).

* Specify except catch and add some typing
dgdekoning authored Aug 22, 2019
1 parent b5093e3 commit ccdd948
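
The first bullet refers to the pandas chained-indexing pitfall behind the bug: an expression such as dataframe.loc[key][col] = value goes through an intermediate object, so the assignment can land on a copy and never reach the stored table. A minimal sketch of the difference, using a toy DataFrame with made-up labels rather than the Activity Browser metadata store:

import pandas as pd

# Toy data; the real metadata table is indexed by Brightway activity keys.
df = pd.DataFrame({"name": ["steel", "water"], "unit": ["kg", "m3"]},
                  index=["a1", "a2"])

# Chained indexing: df.loc["a1"] first returns an intermediate Series, so
# the assignment may modify a copy and leave df untouched (older pandas
# only emits a SettingWithCopyWarning here).
df.loc["a1"]["unit"] = "tonne"

# Single-step label access assigns into the frame itself.
df.at["a1", "unit"] = "tonne"
print(df.loc["a1", "unit"])  # tonne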
Showing 1 changed file with 12 additions and 11 deletions.
23 changes: 12 additions & 11 deletions activity_browser/app/bwutils/metadata.py
@@ -1,5 +1,6 @@
 # -*- coding: utf-8 -*-
 import brightway2 as bw
+from bw2data.errors import UnknownObject
 import pandas as pd
 import numpy as np

@@ -37,7 +38,7 @@ def _connect_signals(self):
         signals.project_selected.connect(self.reset_metadata)
         signals.metadata_changed.connect(self.update_metadata)

-    def add_metadata(self, db_names_list):
+    def add_metadata(self, db_names_list: list) -> None:
         """"Include data from the brightway databases.
         Get metadata in form of a Pandas DataFrame for biosphere and
@@ -76,7 +77,7 @@ def add_metadata(self, db_names_list):

             # In a new 'biosphere3' database, some categories values are lists
             if 'categories' in df_temp:
-                df_temp.loc[:, 'categories'] = df_temp['categories'].apply(
+                df_temp['categories'] = df_temp['categories'].apply(
                     lambda x: tuple(x) if isinstance(x, list) else x)

             dfs.append(df_temp)
@@ -86,7 +87,7 @@ def add_metadata(self, db_names_list):
         self.dataframe.replace(np.nan, '', regex=True, inplace=True) # replace 'nan' values with emtpy string
         # print('Dimensions of the Metadata:', self.dataframe.shape)

-    def update_metadata(self, key):
+    def update_metadata(self, key: tuple) -> None:
         """Update metadata when an activity has changed.
         Three situations:
@@ -101,7 +102,7 @@ def update_metadata(self, key):
         """
         try:
             act = bw.get_activity(key) # if this does not work, it has been deleted (see except:).
-        except:
+        except UnknownObject:
             # Situation 1: activity has been deleted (metadata needs to be deleted)
             print('Deleting activity from metadata:', key)
             self.dataframe.drop(key, inplace=True)
@@ -116,8 +117,8 @@ def update_metadata(self, key):
         if key in self.dataframe.index: # Situation 2: activity has been modified (metadata needs to be updated)
             print('Updating activity in metadata: ', act, key)
             for col in self.dataframe.columns:
-                self.dataframe.loc[key][col] = act.get(col, '')
-            self.dataframe.loc[key]['key'] = act.key
+                self.dataframe.at[key, col] = act.get(col, '')
+            self.dataframe.at[key, 'key'] = act.key

         else: # Situation 3: Activity has been added to database (metadata needs to be generated)
             print('Adding activity to metadata:', act, key)
@@ -127,24 +128,24 @@ def update_metadata(self, key):
         self.dataframe.replace(np.nan, '', regex=True, inplace=True) # replace 'nan' values with emtpy string
         # print('Dimensions of the Metadata:', self.dataframe.shape)

-    def reset_metadata(self):
+    def reset_metadata(self) -> None:
         """Deletes metadata when the project is changed."""
         # todo: metadata could be collected across projects...
         print('Reset metadata.')
         self.dataframe = pd.DataFrame()
         self.databases = set()

-    def get_existing_fields(self, field_list):
+    def get_existing_fields(self, field_list: list) -> list:
         """Return a list of fieldnames that exist in the current dataframe.
         """
         return [fn for fn in field_list if fn in self.dataframe.columns]

-    def get_metadata(self, keys, columns):
+    def get_metadata(self, keys: list, columns: list) -> pd.DataFrame:
         """Return a slice of the dataframe matching row and column identifiers.
         """
         return self.dataframe.loc[keys][columns]

-    def get_database_metadata(self, db_name):
+    def get_database_metadata(self, db_name: str) -> pd.DataFrame:
         """Return a slice of the dataframe matching the database.
         """
         return self.dataframe[self.dataframe['database'] == db_name]
@@ -157,7 +158,7 @@ def index(self):
         """
         return self.dataframe.index

-    def unpack_tuple_column(self, colname, new_colnames=None):
+    def unpack_tuple_column(self, colname: str, new_colnames: list=None) -> None:
         """Takes the given column in the dataframe and unpack it.
         To allow for quick aggregation, we:
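The 'categories' fix in add_metadata combines two things: plain column assignment (df_temp['categories'] = ...) instead of assigning through .loc, and converting list values to tuples. The conversion presumably matters because lists are unhashable and break DataFrame operations that hash cell values; a rough sketch with invented rows, not the real 'biosphere3' data:

import pandas as pd

# Invented biosphere-style rows for illustration only.
df_temp = pd.DataFrame({
    "categories": [["air", "urban air"], ["water"], ["air", "urban air"]],
})

# With list values, hashing-based operations such as
# df_temp["categories"].value_counts() raise
# "TypeError: unhashable type: 'list'".

# Whole-column assignment plus list -> tuple conversion, as in the commit:
df_temp["categories"] = df_temp["categories"].apply(
    lambda x: tuple(x) if isinstance(x, list) else x)

print(df_temp["categories"].value_counts())  # works once the values are tuples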

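The second bullet narrows the bare except: in update_metadata to bw2data's UnknownObject, so only a genuinely missing activity is treated as a deletion and any other error still surfaces. A reduced sketch of that pattern; fetch_activity is a hypothetical helper, not a function in the repository:

import brightway2 as bw
from bw2data.errors import UnknownObject

def fetch_activity(key: tuple):
    """Return the activity for `key`, or None when it no longer exists."""
    try:
        return bw.get_activity(key)
    except UnknownObject:
        # Only the "activity was deleted" case is handled here; any other
        # exception propagates instead of being silently swallowed.
        return None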