Misc: pandas future warning
ebranlard committed Aug 8, 2024
1 parent 7d39f05 commit 1e39b82
Showing 8 changed files with 89 additions and 22 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -12,7 +12,7 @@ jobs:
runs-on: ubuntu-22.04
strategy:
matrix:
python-version: [3.8, 3.9, 3.11]
python-version: [3.8, 3.9, 3.11, 3.12]

steps:
# --- Install steps
61 changes: 57 additions & 4 deletions pydatview/fast/postpro.py
@@ -689,6 +689,52 @@ def spanwiseColEDTwr(Cols):
def spanwiseColAD(Cols):
""" Return column info, available columns and indices that contain AD spanwise data"""
ADSpanMap=dict()
# From AeroDyn_AllBldNd: TODO Use it directly..
# "ALPHA ","AXIND ","AXIND_QS ","BEM_CT_QS","BEM_F_QS ","BEM_KP_QS","BEM_K_QS ","CD ", &
# "CD_QS ","CHI ","CL ","CLRNC ","CL_QS ","CM ","CMA ","CM_QS ", &
# "CN ","CPMIN ","CT ","CURVE ","CX ","CXA ","CY ","DEBUG1 ", &
# "DEBUG2 ","DEBUG3 ","DYNP ","FBN ","FBS ","FBT ","FBXA ","FBXI ", &
# "FBXL ","FBXP ","FBYA ","FBYI ","FBYL ","FBYP ","FBZA ","FBZI ", &
# "FBZL ","FBZP ","FD ","FL ","FN ","FT ","FX ","FXA ", &
# "FXI ","FXL ","FXP ","FY ","FYI ","FYL ","FYP ","FZI ", &
# "FZL ","FZP ","GAM ","GEOMPHI ","M ","MBN ","MBS ","MBT ", &
# "MBXA ","MBXI ","MBXL ","MBXP ","MBYA ","MBYI ","MBYL ","MBYP ", &
# "MBZA ","MBZI ","MBZL ","MBZP ","MM ","MXI ","MXL ","MXP ", &
# "MYI ","MYL ","MYP ","MZA ","MZI ","MZL ","MZP ","PHI ", &
# "RE ","SGCAV ","SIGCR ","STVX ","STVXA ","STVXI ","STVXL ","STVXP ", &
# "STVY ","STVYA ","STVYI ","STVYL ","STVYP ","STVZ ","STVZA ","STVZI ", &
# "STVZL ","STVZP ","THETA ","TNIND ","TNIND_QS ","TOE ","UA_FLAG ","UA_X1 ", &
# "UA_X2 ","UA_X3 ","UA_X4 ","UA_X5 ","UIN ","UIR ","UIT ","VDISX ", &
# "VDISXA ","VDISXI ","VDISXL ","VDISXP ","VDISY ","VDISYA ","VDISYI ","VDISYL ", &
# "VDISYP ","VDISZ ","VDISZA ","VDISZI ","VDISZL ","VDISZP ","VINDX ","VINDXA ", &
# "VINDXI ","VINDXL ","VINDXP ","VINDY ","VINDYA ","VINDYI ","VINDYL ","VINDYP ", &
# "VINDZA ","VINDZI ","VINDZL ","VINDZP ","VREL ","VUNDX ","VUNDXA ","VUNDXI ", &
# "VUNDXL ","VUNDXP ","VUNDY ","VUNDYA ","VUNDYI ","VUNDYL ","VUNDYP ","VUNDZ ", &
# "VUNDZA ","VUNDZI ","VUNDZL ","VUNDZP ","VX ","VY "/)
# CHARACTER(ChanLen), PARAMETER :: ParamUnitsAry(166) = (/ character(ChanLen) :: & ! This lists the units corresponding to the allowed parameters
# "(deg) ","(-) ","(-) ","(-) ","(-) ","(-) ","(-) ","(-) ", &
# "(-) ","(deg) ","(-) ","(m) ","(-) ","(-) ","(-) ","(-) ", &
# "(-) ","(-) ","(-) ","(deg) ","(-) ","(-) ","(-) ","(-) ", &
# "(-) ","(-) ","(Pa) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ", &
# "(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ", &
# "(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ", &
# "(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ","(N/m) ", &
# "(N/m) ","(N/m) ","(m^2/s)","(1/0) ","(-) ","(N-m/m)","(N-m/m)","(N-m/m)", &
# "(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)", &
# "(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)", &
# "(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(N-m/m)","(deg) ", &
# "(-) ","(-) ","(-) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(deg) ","(-) ","(-) ","(deg) ","(-) ","(rad) ", &
# "(rad) ","(-) ","(-) ","(-) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ", &
# "(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) ","(m/s) "/)


for sB in ['B1','B2','B3']:
ADSpanMap['^[A]*'+sB+r'N(\d*)Alpha_\[deg\]'] =sB+'Alpha_[deg]'
ADSpanMap['^[A]*'+sB+r'N(\d*)AxInd_\[-\]' ] =sB+'AxInd_[-]'
@@ -698,6 +744,7 @@ def spanwiseColAD(Cols):
ADSpanMap['^[A]*'+sB+r'N(\d*)BEM_kp_qs\[-\]' ]=sB+'BEM_kp_qs_[-]'
ADSpanMap['^[A]*'+sB+r'N(\d*)BEM_F_qs\[-\]' ]=sB+'BEM_F_qs_[-]'
ADSpanMap['^[A]*'+sB+r'N(\d*)BEM_CT_qs_\[-\]' ]=sB+'BEM_CT_qs_[-]'
ADSpanMap['^[A]*'+sB+r'N(\d*)Chi_\[deg\]' ]=sB+'Chi_[deg]'
ADSpanMap['^[A]*'+sB+r'N(\d*)Cl_\[-\]' ] =sB+'Cl_[-]'
ADSpanMap['^[A]*'+sB+r'N(\d*)Cd_\[-\]' ] =sB+'Cd_[-]'
ADSpanMap['^[A]*'+sB+r'N(\d*)Cm_\[-\]' ] =sB+'Cm_[-]'
@@ -848,7 +895,10 @@ def insert_extra_columns_AD(dfRad, tsAvg, vr=None, rho=None, R=None, nB=None, ch
Ct=nB*Fx/(0.5 * rho * 2 * U0**2 * np.pi * vr)
Ct[vr<0.01*R] = 0
dfRad[sB+'Ctloc_[-]'] = Ct
CT=2*np.trapz(vr_bar*Ct,vr_bar)
try:
CT=2*np.trapezoid(vr_bar*Ct,vr_bar)
except:
CT=2*np.trapz(vr_bar*Ct,vr_bar)
dfRad[sB+'CtAvg_[-]']= CT*np.ones(vr.shape)
except:
pass
@@ -1571,10 +1621,10 @@ def bin_mean_DF(df, xbins, colBin ):
raise Exception('The column `{}` does not appear to be in the dataframe'.format(colBin))
xmid = (xbins[:-1]+xbins[1:])/2
df['Bin'] = pd.cut(df[colBin], bins=xbins, labels=xmid ) # Adding a column that has bin attribute
df2 = df.groupby('Bin').mean() # Average by bin
df2 = df.groupby('Bin', observed=False).mean() # Average by bin
# also counting
df['Counts'] = 1
dfCount=df[['Counts','Bin']].groupby('Bin').sum()
dfCount=df[['Counts','Bin']].groupby('Bin', observed=False).sum()
df2['Counts'] = dfCount['Counts']
# Just in case some bins are missing (will be nan)
df2 = df2.reindex(xmid)
@@ -1829,7 +1879,10 @@ def integrateMoment(r, F):
"""
M = np.zeros(len(r)-1)
for ir,_ in enumerate(r[:-1]):
M[ir] = np.trapz(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
try:
M[ir] = np.trapezoid(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
except:
M[ir] = np.trapz(F[ir:]*(r[ir:]-r[ir]), r[ir:]-r[ir])
return M

def integrateMomentTS(r, F):
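
Note on the spanwise extraction above: spanwiseColAD builds a dictionary whose keys are regular expressions over raw AeroDyn channel names (the node number is captured by the (\d*) group) and whose values are blade-generic column names. Below is a minimal, self-contained sketch of that matching, reusing two of the patterns from the diff; the channel names are invented for illustration.

    import re

    # Two of the patterns used in spanwiseColAD; the channel names below are invented.
    ADSpanMap = {}
    for sB in ['B1', 'B2', 'B3']:
        ADSpanMap['^[A]*' + sB + r'N(\d*)Alpha_\[deg\]'] = sB + 'Alpha_[deg]'
        ADSpanMap['^[A]*' + sB + r'N(\d*)Cl_\[-\]']      = sB + 'Cl_[-]'

    cols = ['Time_[s]', 'AB1N001Alpha_[deg]', 'AB1N002Alpha_[deg]', 'B2N001Cl_[-]']
    for c in cols:
        for pattern, generic in ADSpanMap.items():
            m = re.match(pattern, c)
            if m:
                print('{:22s} -> node {:d} of {:s}'.format(c, int(m.group(1)), generic))
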
2 changes: 1 addition & 1 deletion pydatview/io/gnuplot_file.py
@@ -33,7 +33,7 @@ class GNUPlotFile(File):
@staticmethod
def defaultExtensions():
""" List of file extensions expected for this fileformat"""
return ['.dat']
return ['.dat','.raw']

@staticmethod
def formatName():
17 changes: 11 additions & 6 deletions pydatview/plotdata.py
@@ -5,6 +5,11 @@
from pydatview.common import unique, pretty_num, pretty_time, pretty_date
from pydatview.tools.stats import bin_signal
import matplotlib.dates as mdates
try:
trapz = np.trapezoid
except AttributeError:
trapz = np.trapz


# --------------------------------------------------------------------------------}
# --- PlotDataList functions
@@ -605,7 +610,7 @@ def inty(PD):
return None,'NA'
else:
try:
v=np.trapz(y=PD.y,x=PD.x)
v=trapz(y=PD.y,x=PD.x)
s=pretty_num(v)
return v,s
except:
@@ -616,7 +621,7 @@ def intyintdx(PD):
return None,'NA'
else:
try:
v=np.trapz(y=PD.y,x=PD.x)/np.trapz(y=PD.x*0+1,x=PD.x)
v=trapz(y=PD.y,x=PD.x)/trapz(y=PD.x*0+1,x=PD.x)
s=pretty_num(v)
return v,s
except:
@@ -627,7 +632,7 @@ def intyx1(PD):
return None,'NA'
else:
try:
v=np.trapz(y=PD.y*PD.x,x=PD.x)
v=trapz(y=PD.y*PD.x,x=PD.x)
s=pretty_num(v)
return v,s
except:
@@ -639,8 +644,8 @@ def intyx1_scaled(PD):
return None,'NA'
else:
try:
v=np.trapz(y=PD.y*PD.x,x=PD.x)
v=v/np.trapz(y=PD.y,x=PD.x)
v=trapz(y=PD.y*PD.x,x=PD.x)
v=v/trapz(y=PD.y,x=PD.x)
s=pretty_num(v)
return v,s
except:
@@ -651,7 +656,7 @@ def intyx2(PD):
return None,'NA'
else:
try:
v=np.trapz(y=PD.y*PD.x**2,x=PD.x)
v=trapz(y=PD.y*PD.x**2,x=PD.x)
s=pretty_num(v)
return v,s
except:
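
The trapz alias added at the top of plotdata.py resolves the integration routine once instead of wrapping every call in try/except: NumPy 2.0 renamed np.trapz to np.trapezoid, and the old name now raises a deprecation warning. A minimal sketch of the same pattern, with an invented signal and the two integral quantities computed the way inty and intyintdx do above:

    import numpy as np

    # Resolve the trapezoidal integration routine once: np.trapezoid on NumPy >= 2.0,
    # np.trapz on older releases.
    try:
        trapz = np.trapezoid
    except AttributeError:
        trapz = np.trapz

    x = np.linspace(0.0, 10.0, 101)   # invented signal
    y = np.sin(x)

    int_y  = trapz(y=y, x=x)                                   # like inty
    mean_y = trapz(y=y, x=x) / trapz(y=np.ones_like(x), x=x)   # like intyintdx
    print(int_y, mean_y)
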
4 changes: 2 additions & 2 deletions pydatview/tools/fatigue.py
@@ -213,9 +213,9 @@ def bin_count(x, bins, meanBin=True):
df = pd.DataFrame(data=x, columns=['x'])
xmid = (bins[:-1]+bins[1:])/2
df['x_mid']= pd.cut(df['x'], bins= bins, labels = xmid ) # Adding a column that has bin attribute
df2 = df.groupby('x_mid').mean() # Average by bin
df2 = df.groupby('x_mid', observed=False).mean() # Average by bin
df['N'] = 1
dfCount = df[['N','x_mid']].groupby('x_mid').sum()
dfCount = df[['N','x_mid']].groupby('x_mid', observed=False).sum()
df2['N'] = dfCount['N']
# Just in case some bins are missing (will be nan)
df2 = df2.reindex(xmid)
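
The observed=False arguments added in bin_count (and in bin_mean_DF of postpro.py above) address a pandas FutureWarning: when grouping by a categorical key such as the output of pd.cut, the default of observed is changing, and the binning here needs empty bins to remain in the result. A minimal sketch with invented samples:

    import numpy as np
    import pandas as pd

    x = np.array([0.1, 0.2, 0.25, 0.7, 0.75, 0.8])   # invented samples
    bins = np.linspace(0.0, 1.0, 5)                  # 4 bins of width 0.25
    xmid = (bins[:-1] + bins[1:]) / 2

    df = pd.DataFrame({'x': x})
    df['x_mid'] = pd.cut(df['x'], bins=bins, labels=xmid)   # categorical bin label
    df['N'] = 1

    # observed=False keeps empty categories (the bin labelled 0.375 has no samples)
    # and silences the FutureWarning about the changing default.
    counts = df[['N', 'x_mid']].groupby('x_mid', observed=False).sum()
    print(counts)
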
13 changes: 9 additions & 4 deletions pydatview/tools/signal_analysis.py
@@ -259,16 +259,21 @@ def applySampler(x_old, y_old, sampDict, df_old=None):
sample_time = float(param[0])
if sample_time <= 0:
raise Exception('Error: sample time must be positive')
# --- Version dependency...
pdVer = [int(s) for s in pd.__version__.split('.')]
sSample = "{:f}s".format(sample_time)
if pdVer[0]<=1 or (pdVer[0]<=2 and pdVer[1]<2):
sSample = "{:f}S".format(sample_time)

time_index = pd.TimedeltaIndex(x_old, unit="S")
x_new = pd.Series(x_old, index=time_index).resample("{:f}S".format(sample_time)).mean().interpolate().values
time_index = pd.to_timedelta(x_old, unit="s")
x_new = pd.Series(x_old, index=time_index).resample(sSample).mean().interpolate().values

if df_old is not None:
df_new = df_old.set_index(time_index, inplace=False).resample("{:f}S".format(sample_time)).mean()
df_new = df_old.set_index(time_index, inplace=False).resample(sSample).mean()
df_new = df_new.interpolate().reset_index(drop=True)
return x_new, df_new
if y_old is not None:
y_new = pd.Series(y_old, index=time_index).resample("{:f}S".format(sample_time)).mean()
y_new = pd.Series(y_old, index=time_index).resample(sSample).mean()
y_new = y_new.interpolate().values
return x_new, y_new

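
The version test added in applySampler picks the seconds offset alias that the installed pandas accepts without warnings: release 2.2 deprecates the upper-case "S" alias in favour of "s", and the time index is now built with pd.to_timedelta(x_old, unit="s"). A minimal sketch of the resampling, using the same version test and an invented, irregular time vector:

    import numpy as np
    import pandas as pd

    # Pick the seconds alias expected by the installed pandas: 's' from 2.2 on,
    # 'S' before that (same test as in applySampler).
    pdVer = [int(s) for s in pd.__version__.split('.')[:2]]
    sample_time = 0.5
    sSample = "{:f}s".format(sample_time)
    if pdVer[0] <= 1 or (pdVer[0] <= 2 and pdVer[1] < 2):
        sSample = "{:f}S".format(sample_time)

    x_old = np.array([0.0, 0.31, 0.59, 1.12, 1.48, 2.03])   # invented time stamps
    y_old = np.sin(x_old)

    time_index = pd.to_timedelta(x_old, unit="s")
    x_new = pd.Series(x_old, index=time_index).resample(sSample).mean().interpolate().values
    y_new = pd.Series(y_old, index=time_index).resample(sSample).mean().interpolate().values
    print(x_new)
    print(y_new)
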
5 changes: 4 additions & 1 deletion pydatview/tools/stats.py
@@ -211,7 +211,10 @@ def pdf_histogram(y,nBins=50, norm=True, count=False):
else:
yh = yh / (nBins*dx)
if norm:
yh=yh/np.trapz(yh,xh)
try:
yh=yh/np.trapezoid(yh,xh)
except:
yh=yh/np.trapz(yh,xh)
return xh,yh

def pdf_gaussian_kde(data, bw='scott', nOut=100, cut=3, clip=(-np.inf,np.inf)):
7 changes: 4 additions & 3 deletions tests/test_signal.py
@@ -89,6 +89,7 @@ def test_interp(self, plot=False):


if __name__ == '__main__':
TestSignal().test_interpDF()
TestSignal().test_interp()
# unittest.main()
# TestSignal().test_interpDF()
# TestSignal().test_interp()
# TestSignal().test_up_down_sample()
unittest.main()
