diff --git a/_Old/Tutorial_ToFu_Geom.py b/_Old/Tutorial_ToFu_Geom.py deleted file mode 100644 index 7d0e21890..000000000 --- a/_Old/Tutorial_ToFu_Geom.py +++ /dev/null @@ -1,339 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Thu Apr 24 15:30:23 2014 - -@author: didiervezinet -""" -import numpy as np -import matplotlib.pyplot as plt -import ToFu_PathFile as TFPF -import ToFu_Geom -# -# -# -# -# - -""" -# Defining a Tor object from a numpy array -theta = np.linspace(0,2*np.pi,100) -Rcoo = 1.5 + 0.75*np.cos(theta) -Zcoo = 0.75*np.sin(theta) -PolyRef = np.array([Rcoo,Zcoo]) -Tor1 = ToFu_Geom.Tor('Example1', PolyRef) -print Tor1 -""" -# Defining a Tor object from a .txt file -RP = TFPF.Find_Rootpath() -PathFile = RP + '/Inputs/AUG_Tor.txt' -PolyRef = np.loadtxt(PathFile, dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2) -Tor2 = ToFu_Geom.Tor('V1',PolyRef) -print Tor2 -""" -# Plotting the poloidal and toroidal projections of the reference polygon of ASDEX Upgrade -axP, axT = Tor2.plot_AllProj(Elt='PI') -#axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Tor_AllProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -axP = Tor2.plot_PolProj_Vect(ax=axP) -#axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Tor_AllProjAndVect.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -ax3 = Tor2.plot_3D_plt(thetaLim=(np.pi/4.,7.*np.pi/4.)) -#ax3.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Tor_3D.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -axImp = Tor2.plot_Impact_PolProj() -axImp3 = Tor2.plot_Impact_3D() -axImp.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Tor_Imp.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -axImp3.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Tor_Imp3.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - - -# Defining a LOS object as an example, in ASDEX Upgrade -D = np.array([2.,-2.,1.]).reshape((3,1)) # Defining D (starting point) -uu = np.array([-0.2,1.,-0.8]) # Defining uu (vector for direction of LOS) -uu = (uu/np.linalg.norm(uu)).reshape((3,1)) # Normalising uu -Los = ToFu_Geom.LOS('LOS 1',(D,uu),Tor=Tor2) # Creating a LOS object using a pre-defined Tor object (kwdarg 'T' stands for Tor) -print Los - -axP, axT = Tor2.plot_AllProj() -axP, axT = Los.plot_AllProj(Elt='LDIORr',EltTor='PI') -#axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_LOS_AllProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -ax3 = Los.plot_3D_plt(Elt='LDIORr',EltTor='PI',thetaLim=(0.,7.*np.pi/4.),MdictR={'c':'b','marker':'o','ls':'None'},Mdictr={'c':'r','marker':'+','ls':'None'}) -#ax3.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_LOS_3D.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -mdict = {'ls':'None','marker':'o','c':'b'} -axImp = Los.plot_Impact_PolProj(ax=axImp,Mdict=mdict) # Plot coordinates (impact parameter and angles) with respect to Los.Tor.ImpRZ = Tor2.ImpRZ (=center of mass of Tor2 by default) -RefP2 = np.array([1.5,-0.05]).reshape((2,1)) -Los.set_Impact(RefP2) # Compute the new coordinates (impact parameter and angles) with respect to RefP2 -mdict = {'ls':'None','marker':'o','c':'r'} -axImp = Los.plot_Impact_PolProj(ax=axImp,Mdict=mdict) # Plot new coordinates (impact parameter 
and angles) with respect to RefP2 -#axImp.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_LOS_ImpPol.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Creating two arbirary cameras -P1, P2 = np.array([2.,-2.,1.]).reshape((3,1)), np.array([0.5,-0.5,-0.5]).reshape((3,1)) # Creating the common points -n1, n2 = 10, 15 # Reminding the number of LOS in each camera -phi1, phi2 = np.pi/7., np.pi/4. -theta1, theta2 = np.linspace(np.pi/8.,np.pi/4.,n1), np.linspace(5.*np.pi/6.,6.5*np.pi/6.,n2) -uu1 = np.array([-np.sin(phi1)*np.cos(theta1),np.cos(phi1)*np.cos(theta1),-np.sin(theta1)]) # Creating the unitary vectors -uu2 = np.array([-np.sin(phi2)*np.cos(theta2),np.cos(phi2)*np.cos(theta2),-np.sin(theta2)]) -LLos1 = [ToFu_Geom.LOS("Los1"+str(ii),(P1,uu1[:,ii:ii+1]),Tor=Tor2) for ii in range(0,n1)] # Creating the lists of LOS objects -LLos2 = [ToFu_Geom.LOS("Los2"+str(ii),(P2,uu2[:,ii:ii+1]),Tor=Tor2) for ii in range(0,n2)] -GLos1, GLos2 = ToFu_Geom.GLOS("Cam1",LLos1), ToFu_Geom.GLOS("Cam2",LLos2) # Creating the GLOS objects -print GLos1, GLos2 - -# Plotting the GLOS objects -axP, axT = GLos1.plot_AllProj(Ldict={'c':'b'},Elt='L',EltTor='PI',Lplot='In') -axP, axT = GLos2.plot_AllProj(axP=axP,axT=axT,Ldict={'c':'r'},Elt='L') -#axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GLOS_AllProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting in projection space of the two cameras -axImp = Tor2.plot_Impact_PolProj() -axImp = GLos1.plot_Impact_PolProj(ax=axImp,Mdict={'ls':'None','marker':'o','c':'b'}) -axImp = GLos2.plot_Impact_PolProj(ax=axImp,Mdict={'ls':'None','marker':'x','c':'r'}) -#axImp.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GLOS_ImpPol.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -# Selecting subsets -for ii in range(0,n1): - GLos1.LLOS[ii].Id.Code = "Signal"+str(ii*10) - GLos1.LLOS[ii].Id.Age = 3.*ii/n1 - ind = GLos1.get_ind_LOS('Code',"in ['Signal0','Signal50']") -subLLos = GLos1.pick_LOS('Age','<=1.') -print ind, subLLos -# Alternative -LAttr = [los.Id.Code for los in GLos1.LLOS] -ind = [LAttr.index('Signal0'), LAttr.index('Signal50')] -print ind -""" -# Create two Apert and one Detect -d1, d2, d3 = 0.02, 0.02, 0.02 # Characteristic size of each polygon -C1, C2, C3 = np.array([1.56,-1.49,0.75]), np.array([1.52,-1.38,0.70]), np.array([1.60,-1.60,0.80]) # Creating the centers around which the polygons will be built -C1, C2, C3 = C1.reshape((3,1)), C2.reshape((3,1)), C3.reshape((3,1)) -n1, n2, n3 = np.array([0.1,-2.,0.5]), np.array([1.,-1.,0.8]), np.array([1.,-1.,0.]) # Creating the vectors of there respective planes -n1, n2, n3 = n1/np.linalg.norm(n1), n2/np.linalg.norm(n2), n3/np.linalg.norm(n3) -e11, e21, e31 = np.cross(n1,np.array([0.,0.,1.])), np.cross(n2,np.array([0.,0.,1.])), np.cross(n3,np.array([0.,0.,1.])) -e11, e21, e31 = e11/np.linalg.norm(e11), e21/np.linalg.norm(e21), e31/np.linalg.norm(e31) -e12, e22, e32 = np.cross(n1,e11), np.cross(n2,e21), np.cross(n3,e31) # Building a local normalised base of vector -Poly1 = d1*np.array([[-1, 1, 0.],[-1, -1, 1]]) # The first polygon is a triangle -Poly2 = d2*np.array([[-1, 1, 1.5, 0., -1.5],[-1, -1, 0., 1, 0.]]) # The second one is a pentagon -Poly3 = d3*np.array([[-1., 1, 1, -1,],[-1., -1., 1., 1.]]) # The third is a rectangle -Poly1 = np.dot(C1,np.ones((1,Poly1.shape[1]))) + np.dot(e11.reshape((3,1)),Poly1[0:1,:]) + np.dot(e12.reshape((3,1)),Poly1[1:2,:]) -Poly2 = 
np.dot(C2,np.ones((1,Poly2.shape[1]))) + np.dot(e21.reshape((3,1)),Poly2[0:1,:]) + np.dot(e22.reshape((3,1)),Poly2[1:2,:]) -Poly3 = np.dot(C3,np.ones((1,Poly3.shape[1]))) + np.dot(e31.reshape((3,1)),Poly3[0:1,:]) + np.dot(e32.reshape((3,1)),Poly3[1:2,:]) - -Ap1, Ap2 = ToFu_Geom.Apert('Apert1', Poly1, Tor=Tor2), ToFu_Geom.Apert('Apert2', Poly2, Tor=Tor2) # Creating the two apertures, using Tor2 -D1 = ToFu_Geom.Detect('Detect1', Poly3, Tor=Tor2, LApert=[Ap1,Ap2], CalcEtend=True, CalcCone=False) # Creating the Detect, using Tor2 and the two apertures, calculation of the etendue -print Ap1, Ap2, D1 -print D1.LOS, D1.LOS_Etend_Perp -""" -# Plot the Apert and Detect objects created -axP, axT = D1.plot_AllProj(Elt='PV', EltApert='PV', EltLOS='LDIORr', EltTor='PI') -ax3 = D1.plot_3D_plt(Elt='PV', EltApert='PV', EltLOS='DI', EltTor='',MdictI={'c':'b','marker':'o','ls':'None'}) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_AllProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax3.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_3D.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -ax = D1.plot_SAng_OnPlanePerp(Ra=0.5) -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SolAngPlane.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting the evolution of the etendue on the LOS -ax = D1.plot_Etend_AlongLOS(NP=5, Colis=False, Modes=['simps','trapz','quad'], Ldict=[{'c':'k','ls':'dashed','lw':2},{'c':'b','ls':'dashed','lw':2},{'c':'r','ls':'dashed','lw':2}]) -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting poloidal and toroidal slices -axSAP, axNbP = D1.plot_PolSlice_SAng() -axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP) -axSAT, axNbT = D1.plot_TorSlice_SAng() -axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT) -axSAP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolSlice.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -axSAT.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorSlice.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting Poloidal and Toroidal projections -axSAP, axNbP = D1.plot_PolProj_SAng() -axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP) -axSAT, axNbT = D1.plot_TorProj_SAng() -axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT) -axSAP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -axSAT.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorProj.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting Poloidal and Toroidal projections without collision detection -axSAP, axNbP = D1.plot_PolProj_SAng(Colis=False) -axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP) -axSAT, axNbT = D1.plot_TorProj_SAng(Colis=False) -axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT) -axSAP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolProj_NoColis.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration 
-axSAT.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorProj_NoColis.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting poloidal and toroidal projections polygons of the viewing Cone -axP, axT = D1.plot_AllProj(Elt='PVC', EltApert='PV', EltLOS='LDIORr', EltTor='PI') -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_AllProj_Cone.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Plotting the etendue along the extended LOS -ax = D1.plot_Etend_AlongLOS(NP=14, Length='kMax', Colis=True, Modes=['simps'], PlotL='abs', Ldict=[{'c':'k','ls':'None','lw':2,'marker':'x','markersize':10}]) -ax = D1.plot_Etend_AlongLOS(ax=ax, NP=14, Length='kMax', Colis=False, Modes=['simps'], PlotL='abs', Ldict=[{'c':'r','ls':'None','lw':2,'marker':'o','markersize':10}]) -ax = D1.plot_Etend_AlongLOS(ax=ax, NP=6, Length='POut', Colis=True, Modes=['simps'], PlotL='abs', Ldict=[{'c':'b','ls':'None','lw':2,'marker':'+','markersize':10}]) -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS_Extend.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() -""" - -# Plotting the real extent of Detect in projection space -axImp = D1.plot_Impact_PolProj(Elt='DLT') -axImp.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_Imp.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Define toroidally constant emissivity -def Emiss1(Points): - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Z = Points[2,:] - Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2) - ind = Tor2.isInside(np.array([R,Z])) - Val[~ind] = 0. 
- return 1000.*Val -RR, ZZ = np.linspace(Tor2.PRMin[0], Tor2.PRMax[0],100), np.linspace(Tor2.PZMin[1], Tor2.PZMax[1],200) -RRf, ZZf = np.ones((200,1))*RR, ZZ.reshape((200,1))*np.ones((1,100)) -Val = Emiss1(np.array([RRf.flatten()*np.cos(0), RRf.flatten()*np.sin(0), ZZf.flatten()])) -ax = Tor2.plot_PolProj(Elt='P') -Val[~Tor2.isinside(np.array([RRf.flatten(),ZZf.flatten()]))] = np.nan -ax.contourf(RRf, ZZf, Val.reshape((200,100)),50) -#plt.show() -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_Detect_SynthDiag.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -SigLOS1, SigLOS2 = D1.calc_Sig(Emiss1, Method='LOS', Mode='quad'), D1.calc_Sig(Emiss1, Method='LOS', Mode='simps') -SigCol = D1.calc_Sig(Emiss1, Colis=True, Mode='simps') -SigNoC = D1.calc_Sig(Emiss1, Colis=False, Mode='simps') -print "Signals :", SigLOS1, SigLOS2, SigCol, SigNoC -'Signals : [ 8.34905419e-08] [ 6.33159209e-08]' -plt.show() - - -# Opening the GDetect -""" -Cams = ['F','G','H1','H2','H3','I1','I2','I3','J1','J2','J3','K1','K2','L','M'] -L = os.listdir('./Inputs/') -Str = 'ToFu_Geom_GDetect_AUG_SXR_Test_' -GD = [] -for CC in Cams: - print "Loading GDetect "+CC - pathfileext = './Objects/'+Str+CC+'_D20141128_T195755'+'.pck' - with open(pathfileext, 'rb') as input: - obj = pck.load(input) - GD.append(obj) -""" -pathfileext = './Objects/ToFu_Geom_GDetect_AUG_SXR_F_D20141202_T230455.pck' -with open(pathfileext, 'rb') as input: - F = pck.load(input) - -# Plot etendues -ax = F.plot_Etendues() -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Etend.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Plot AllProj -axP1, axT1 = F.plot_AllProj(Elt='CP',EltApert='P',EltLOS='',EltTor='P') -axP2, axT2 = F.plot_AllProj(Elt='P',EltApert='P',EltLOS='L',EltTor='P') -ind = F.get_ind_Detect(IDAttr='Name',IDExp="=='F_021'").nonzero()[0] -axP3, axT3 = F.LDetect[ind].plot_AllProj(Elt='CP',EltApert='',EltLOS='L',EltTor='P') -axP1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProjC.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -axP2.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProjL.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -axP3.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProj_F019.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -# Plot projection spaces -ax = F.plot_Impact_PolProj(Elt='CLT') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Impact.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Define toroidally variable emissivity -def Emiss2(Points): - ROff = 0.05 - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Theta = np.arctan2(Points[1,:],Points[0,:]) - Z = Points[2,:] - CentR = 1.68+ROff*np.cos(Theta) - CentZ = 0.05+ROff*np.sin(Theta) - Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-CentR)**2/0.08**2 - (Z-CentZ)**2/0.15**2) - ind = Tor2.isinside(np.array([R,Z])) - Val[~ind] = 0. 
- return 1000.*Val - - -# Define anisotropic emissivity -def Emiss3(Points, Vect): - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Theta = np.arctan2(Points[1,:],Points[0,:]) - Z = Points[2,:] - Cos = -np.sin(Theta)*Vect[0,:] + np.cos(Theta)*Vect[1,:] - Sin2 = Vect[2,:]**2 + (np.sin(Theta)*Vect[1,:] + np.cos(Theta)*Vect[0,:])**2 - sca = 100.*Cos**2+1.*Sin2 - Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2) - Val = Val*sca - ind = Tor2.isinside(np.array([R,Z])) - Val[~ind] = 0. - return 1000.*Val - - -# Define vertically-varying emissivity -def EmissZ(Points): - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Z = Points[2,:] - Val = np.exp(-(Z-0.05)**2/0.35**2) - ind = Tor2.isinside(np.array([R,Z])) - Val[~ind] = 0. - return 1000.*Val - - -# Pre-compute the grid -F.set_SigPreCompMat() - - -# Plot difference between Volume and LOS approach for toroidally invariant emissivity -Sig1, Sig2 = F.calc_Sig(Emiss1, Method='LOS',Mode='quad'), F.calc_Sig(Emiss1, Method='Vol',Mode='simps', PreComp=True) -ax = F.plot_Sig(Sig1) -ax.plot(np.arange(1,F.nDetect+1), Sig2 ,label='Vol', c='r') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig1.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Plot difference between Volume and LOS approach for vertcally varying emissivity -Sig1, Sig2 = F.calc_Sig(EmissZ, Method='LOS',Mode='quad'), F.calc_Sig(EmissZ, Method='Vol',Mode='simps', PreComp=True) -ax = F.plot_Sig(Sig1) -ax.plot(range(1,F.nDetect+1), Sig2 ,label='Vol', c='r') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_SigZ.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Plot difference for toroidally varying emissivity -Sig1, Sig2 = F.calc_Sig(Emiss2, Method='LOS',Mode='quad'), F.calc_Sig(Emiss2, Method='Vol',Mode='simps', PreComp=True) -ax = F.plot_Sig(Sig1) -ax.plot(range(1,F.nDetect+1), Sig2 ,label='Vol', c='r') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig2.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Plot difference for anisotropic emissivity -# Plot difference for toroidally varying emissivity -Sig1, Sig2 = F.calc_Sig(Emiss3, Ani=True, Method='LOS',Mode='quad'), F.calc_Sig(Emiss3, Ani=True, Method='Vol',Mode='sum', PreComp=True) -ax = F.plot_Sig(Sig1) -ax.plot(range(1,F.nDetect+1), Sig2 ,label='Vol', c='r') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig3.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - - - - diff --git a/_Old/Tutorial_ToFu_Inv.py b/_Old/Tutorial_ToFu_Inv.py deleted file mode 100644 index 21183d597..000000000 --- a/_Old/Tutorial_ToFu_Inv.py +++ /dev/null @@ -1,47 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Thu Apr 24 15:30:23 2014 - -@author: didiervezinet -""" -import numpy as np -import matplotlib.pyplot as plt -import ToFu_Geom -import ToFu_Mesh -import ToFu_Inv -import ToFu_PathFile as TFPF -import os -import cPickle as pck # for saving objects -import ToFu_Defaults -RP = TFPF.Find_Rootpath() - -# Loading input data (data, GMat2D, BFunc2D...) 
-Input = TFPF.open_object(RP+'/InputTest_Shaped.pck') -t = Input['t'] -data = Input['data'] -sigma = Input['sigma'] - -Deg = 1 -PathFileExt = '_simps_SubP0.03_SubModeRel_SubTheta0.02_D20141205_T153306.pck' -G = TFPF.open_object(RP+'/Objects/TFMC_GMat2D_AUG_SXR_Rough1_D'+str(Deg)+PathFileExt) -Gbis = G.get_SubGMat2D(LIn=Input['LNames']) -BF2 = Gbis.get_BF2() - -Tor = TFPF.open_object(RP+'/Objects/TFG_Tor_AUG_D20141202_T230455.pck') - -# Performing inversion -Dt = [4.54,4.5405] -Coefs, tbis, databis, sigma, Mu, Chi2N, R, Spec, tt = ToFu_Inv.InvChoose(Gbis.TMat_csr, data, t, BF2, sigma=sigma, Dt=Dt, SolMethod='InvLin_AugTikho_V1', Deriv='D1N2', IntMode='Vol', Sparse=True, timeit=True) - - -# Test output with various configurations -Nit = np.array([spec[0] for spec in Spec]) -ani, axInv, axTMat, Laxtemp = ToFu_Inv.Inv_MakeAnim(BF21, Coefs, t=tbis, TMat=None, Com='Blabla', shot=None, SXR=None, sigma=None, Chi2N=None, Mu=None, R=None, Nit=None, Deriv=0, InvPlotFunc='contourf') -ani, axInv, axTMat, Laxtemp = ToFu_Inv.Inv_MakeAnim(BF21, Coefs, t=tbis, TMat=None, Com='Blabla', shot=None, SXR=None, sigma=None, Chi2N=Chi2N, Mu=Mu, R=R, Nit=Nit, Deriv=0, InvPlotFunc='contourf') -ani, axInv, axTMat, Laxtemp = ToFu_Inv.Inv_MakeAnim(BF21, Coefs, t=tbis, TMat=Gbis.TMat_csr.toarray(), Com='Blabla', shot=None, SXR=databis, sigma=None, Chi2N=None, Mu=None, R=None, Nit=None, Deriv=0, InvPlotFunc='contourf') -ani, axInv, axTMat, Laxtemp = ToFu_Inv.Inv_MakeAnim(BF21, Coefs, t=tbis, TMat=Gbis.TMat_csr.toarray(), Com='Blabla', shot=None, SXR=databis, sigma=None, Chi2N=Chi2N, Mu=Mu, R=R, Nit=Nit, Deriv=0, InvPlotFunc='contourf') - -#axInv = Tor.plot_PolProj(ax=axInv) -plt.show() - - diff --git a/_Old/Tutorial_ToFu_MatComp.py b/_Old/Tutorial_ToFu_MatComp.py deleted file mode 100644 index 4df4ecefc..000000000 --- a/_Old/Tutorial_ToFu_MatComp.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Thu Apr 24 15:30:23 2014 - -@author: didiervezinet -""" -import numpy as np -import matplotlib.pyplot as plt -import ToFu_PathFile as TFPF -import ToFu_Defaults as TFD -import ToFu_Geom as TFG -import ToFu_MatComp as TFMC -import os -import cPickle as pck # for saving objects -RP = TFPF.Find_Rootpath() -# -# - -# Loading a pre-defined GDetect object and a BaseFunc2D object, in the example of ASDEX Upgrade -GD = TFPF.open_object(RP+'/Objects/TFG_GDetect_AUG_SXR_Test_F_2_D20141128_T195755.pck') -BF0 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D0_D20141202_T230455.pck') -BF1 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D1_D20141202_T230455.pck') -BF2 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D2_D20141202_T230455.pck') - -# Simply ask ToFu_MatComp to compute geometry matrix associated to each set of basis functions -GM0 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D0', BF0, GD, Mode='simps') -GM1 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D1', BF1, GD, Mode='simps') -GM2 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D2', BF2, GD, Mode='simps') - -# Plot the sum of the geometry matrix, in both dimensions -ax1, ax2 = GM0.plot_sum(TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM0_Sum.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axM, axBF = GM0.plot_OneDetect_PolProj(8, TLOS=True) -axM.set_xlim(400,500) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM0_Detect.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axD, axDred = GM0.plot_OneBF_PolProj(450, 
TLOS=True) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM0_BF.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -# Now, show the higher-order Basis Functions - -ax1, ax2 = GM1.plot_sum(TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM1_Sum.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axM, axBF = GM1.plot_OneDetect_PolProj(8, TLOS=True) -axM.set_xlim(400,500) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM1_Detect.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axD, axDred = GM1.plot_OneBF_PolProj(450, TLOS=True) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM1_BF.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -ax1, ax2 = GM2.plot_sum(TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM2_Sum.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axM, axBF = GM2.plot_OneDetect_PolProj(8, TLOS=True) -axM.set_xlim(400,500) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM2_Detect.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -axP, axD, axDred = GM2.plot_OneBF_PolProj(450, TLOS=True) -axP.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM2_BF.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -# Plot synthetic diagnostic - -Tor2 = GD.Tor -def Emiss1(Points): - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Z = Points[2,:] - Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2) - ind = Tor2.isInside(np.array([R,Z])) - Val[~ind] = 0. - return 1000.*Val - -Coefs0 = BF0.get_Coefs(ff=Emiss1) -Coefs1 = BF1.get_Coefs(ff=Emiss1) -Coefs2 = BF2.get_Coefs(ff=Emiss1) - -ax1, ax2, ax3, ax4 = GM0.plot_Sig(Coefs=Coefs0, TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM0_Sig.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax1, ax2, ax3, ax4 = GM1.plot_Sig(Coefs=Coefs1, TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM1_Sig.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax1, ax2, ax3, ax4 = GM2.plot_Sig(Coefs=Coefs2, TLOS=True) -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMatComp_GM2_Sig.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - - diff --git a/_Old/Tutorial_ToFu_Mesh.py b/_Old/Tutorial_ToFu_Mesh.py deleted file mode 100644 index bcdd5cd00..000000000 --- a/_Old/Tutorial_ToFu_Mesh.py +++ /dev/null @@ -1,168 +0,0 @@ -# -*- coding: utf-8 -*- -""" -Created on Thu Apr 24 15:30:23 2014 - -@author: didiervezinet -""" -import numpy as np -import matplotlib.pyplot as plt -import ToFu_Defaults as TFD -import ToFu_PathFile as TFPF -import ToFu_Geom as TFG -import ToFu_Mesh -import os -import cPickle as pck # for saving objects -RP = TFPF.Find_Rootpath() -# -# - -# Defining a 1D mesh with user-defined resolution -#Knots, Res = ToFu_Mesh.LinMesh_List([(1.,1.5),(1.5,1.8),(1.8,2.)], [(0.06,0.02),(0.02,0.02),(0.02,0.08)])#ToFu_Mesh.LinMesh_List([(0.,10.)], [(1.,1.)]) -#print Res -#print Knots - -# [(0.0569230769230769, 0.02), (0.02, 0.02), (0.02, 0.07999999999999999)] -# [ 1. 
1.05692308 1.11076923 1.16153846 1.20923077 1.25384615 1.29538462 1.33384615 1.36923077 1.40153846 1.43076923 1.45692308 1.48 1.5 1.52 1.54 1.56 1.58 1.6 1.62 1.64 1.66 1.68 1.7 1.72 1.74 1.76 1.78 1.8 1.82 1.86 1.92 2. ] -""" -# Creating the associated Mesh1D object and exploring it properties -M1 = ToFu_Mesh.Mesh1D('M1', Knots) -ax1 = M1.plot(Elt='KCN') -ax2 = M1.plot_Res() -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M1.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax2.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M1_Res.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() -""" - -# Creating an arbitrary 2D mesh object based on the vacuum chamber of ASDEX Upgrade -PolyRef = np.loadtxt(RP + '/Inputs/AUG_Tor.txt', dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2) -AUG = TFG.Tor('AUG',PolyRef) -KnotsR, ResR = ToFu_Mesh.LinMesh_List([(AUG._PRMin[0],1.5),(1.5,1.75),(1.75,AUG._PRMax[0])], [(0.06,0.02),(0.02,0.02),(0.02,0.06)]) -KnotsZ, ResZ = ToFu_Mesh.LinMesh_List([(AUG._PZMin[1],-0.1),(-0.1,0.1),(0.1,AUG._PZMax[1])], [(0.10,0.02),(0.02,0.02),(0.02,0.08)]) -M2 = ToFu_Mesh.Mesh2D('M2', [KnotsR,KnotsZ]) -#ax = M2.plot(Elt='MBKC') -#ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M2_Raw.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -# Getting a submesh from it using a smoothed convex polygon -Poly = AUG.get_InsideConvexPoly(Spline=True) -M2bis = M2.get_SubMeshPolygon(Poly, NLim=2) -#ax = AUG.plot_PolProj(Elt='P') -#ax = M2bis.plot(Elt='BM', ax=ax) -#ax1, ax2, ax3, axcb = M2bis.plot_Res() -#ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M2.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M2_Res.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() -""" -# Selecting all Knots associated to a mesh element, and the other way around -Knots50 = M2bis.Knots[:,M2bis.Cents_Knotsind[:,50].flatten()] -print Knots50 -ax = M2bis.plot_Cents(Ind=50, Elt='BMKC') -# [[ 1.69230769 1.71153846 1.71153846 1.69230769] -# [-0.94421053 -0.94421053 -0.85868421 -0.85868421]] -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M2_Cents.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -#plt.show() - -ind = np.array([160,655,1000]) -Cents = M2bis.Cents[:,M2bis.Knots_Centsind[:,ind].flatten()] -print Cents -ax = M2bis.plot_Knots(Ind=ind, Elt='BMKC') -# [[ 1.83922727 1.07454545 1.70192308 1.87418182 1.13548182 1.72115385 1.83922727 1.07454545 1.70192308 1.87418182 1.13548182 1.72115385] -# [-0.66452632 -0.05 0.13140693 -0.66452632 -0.05 0.13140693 -0.59428947 -0.03 0.15562771 -0.59428947 -0.03 0.15562771]] -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_M2_Knots.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Creating a BaseFunc1D -BF1 = ToFu_Mesh.BF1D('BF1',M1,2) -FF = lambda xx: np.exp(-(xx-1.5)**2/0.2**2) + 0.4*np.exp(-(xx-1.65)**2/0.01**2) -Coefs, res = BF1.get_Coefs(ff=FF) -ax = BF1.plot(Coefs=Coefs, Elt='TL') -ax.plot(np.linspace(1.,2.,500), FF(np.linspace(1.,2.,500)), c='r', lw=2, label='Ref function') -ax.legend(**TFD.TorLegd) -#ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF1.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() 
- -ax = BF1.plot(Coefs=Coefs, Deriv='D2', Elt='T',Totdict={'c':'k','lw':2}) -ax = BF1.plot(ax=ax, Coefs=Coefs, Deriv='D1N2', Elt='T',Totdict={'c':'b','lw':2}) -ax = BF1.plot(ax=ax, Coefs=Coefs, Deriv='D1FI', Elt='T',Totdict={'c':'r','lw':2}) -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF1_Deriv.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -# Getting integral operators and values -A, m = BF1.get_IntOp(Deriv='D0') -Int = BF1.get_IntVal(Coefs=Coefs, Deriv='D0') -print A.shape, m -print Int -# (30,) 0 -# 0.361213888999 - -# Plotting selected functions and their Cents and Knots -ax = BF1.plot_Ind(Ind=[0,5,8], Elt='LCK') -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF1_Select.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() -""" - -# Creating a BaseFunc2D object -BF2 = ToFu_Mesh.BF2D('BF2',M2bis,1) # Defining the BaseFunc2D object -""" -PathFile = RP + '/Inputs/AUG_Tor.txt' -PolyRef = np.loadtxt(PathFile, dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2) -Tor2 = TFG.Tor('AUG',PolyRef) # Defining the Tor object for the Emiss function definition -def Emiss(Points): # Definition of the inpout Emiss function - R = np.sqrt(Points[0,:]**2+Points[1,:]**2) - Z = Points[2,:] - Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2) - ind = Tor2.isInside(np.array([R,Z])) - Val[~ind] = 0. - return Val - -ax1, ax2 = BF2.plot_fit(ff=Emiss) # Plotting the fitted function -""" -Coefs, res = 1.,0#BF2.get_Coefs(ff=Emiss) # Extracxting the coefficients corresponding to the fitted function -""" -f, axarr = plt.subplots(2,4, sharex=True, facecolor="w" ,figsize=(20,13)) -ax = BF2.plot(ax=axarr[0,0], Coefs=Coefs,Deriv='D1', DVect=TFD.BF2_DVect_Def) # Plotting the gradient scalar vertical vector (Z-component) -ax.axis("equal"), ax.set_title("D1-Z") -ax = BF2.plot(ax=axarr[1,0], Coefs=Coefs,Deriv='D1', DVect=TFD.BF2_DVect_Defbis) # Plotting the gradient scalar horizontal vector (R-vector) -ax.axis("equal"), ax.set_title("D1-R") -ax = BF2.plot(ax=axarr[0,1], Coefs=Coefs,Deriv='D1N2') # Plotting the squared norm of the gradient -ax.axis("equal"), ax.set_title("D1N2") -ax = BF2.plot(ax=axarr[1,1], Coefs=Coefs,Deriv='D1FI') # Plotting the local fisher information -ax.axis("equal"), ax.set_title("D1FI") -ax = BF2.plot(ax=axarr[0,2], Coefs=Coefs,Deriv='D2Lapl') # Plotting the laplacian -ax.axis("equal"), ax.set_title("D2Lapl") -ax = BF2.plot(ax=axarr[1,2], Coefs=Coefs,Deriv='D2LaplN2') # Plotting the squared norm of the laplacian -ax.axis("equal"), ax.set_title("D2LaplN2") -ax = BF2.plot(ax=axarr[0,3], Coefs=Coefs,Deriv='D2Gauss') # Plotting the Gaussian curvature of the surface -ax.axis("equal"), ax.set_title("D2Gauss") -ax = BF2.plot(ax=axarr[1,3], Coefs=Coefs,Deriv='D2Mean') # Plotting the Mean curvature of the surface -ax.axis("equal"), ax.set_title("D2Mean") -ax1.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF2.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF2_Deriv.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - - -# Plotting selected basis functions support -ax = BF2.plot_Ind(Ind=[200,201,202, 300,301,302, 622,623,624,625,626, 950], Elt='L', EltM='M', Coefs=Coefs) # Plotting local basis functions values and mesh 
-ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF2_Int1.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -ax = BF2.plot_Ind(Ind=[200,201,202, 300,301,302, 622,623,624,625,626, 950], Elt='SP', EltM='MCK', Coefs=Coefs) # Plotting local basis functions support and PMax and mesh with centers and knots -ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF2_Int2.png",frameon=None,bbox_inches=0) # Saving for Tutorial illustration -plt.show() - -""" -# Getting integral operators and values -print BF2.Mesh.Surf -print "Int radiation : ", BF2.get_IntVal(Deriv='D0', Coefs=1.) -print "Int sq. gradient : ", BF2.get_IntVal(Deriv='D1N2', Coefs=Coefs) -# 1.69963173663 -# Surf : -# Vol : -# Surf : -# Vol : - - - - - - - diff --git a/_Old_doc/Makefile b/_Old_doc/Makefile deleted file mode 100644 index 20906d953..000000000 --- a/_Old_doc/Makefile +++ /dev/null @@ -1,177 +0,0 @@ -# Makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = sphinx-build -PAPER = -BUILDDIR = build - -# User-friendly check for sphinx-build -ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) -$(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) -endif - -# Internal variables. -PAPEROPT_a4 = -D latex_paper_size=a4 -PAPEROPT_letter = -D latex_paper_size=letter -ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source -# the i18n builder cannot share the environment and doctrees with the others -I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) source - -.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext - -help: - @echo "Please use \`make ' where is one of" - @echo " html to make standalone HTML files" - @echo " dirhtml to make HTML files named index.html in directories" - @echo " singlehtml to make a single large HTML file" - @echo " pickle to make pickle files" - @echo " json to make JSON files" - @echo " htmlhelp to make HTML files and a HTML help project" - @echo " qthelp to make HTML files and a qthelp project" - @echo " devhelp to make HTML files and a Devhelp project" - @echo " epub to make an epub" - @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" - @echo " latexpdf to make LaTeX files and run them through pdflatex" - @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" - @echo " text to make text files" - @echo " man to make manual pages" - @echo " texinfo to make Texinfo files" - @echo " info to make Texinfo files and run them through makeinfo" - @echo " gettext to make PO message catalogs" - @echo " changes to make an overview of all changed/added/deprecated items" - @echo " xml to make Docutils-native XML files" - @echo " pseudoxml to make pseudoxml-XML files for display purposes" - @echo " linkcheck to check all external links for integrity" - @echo " doctest to run all doctests embedded in the documentation (if enabled)" - -clean: - rm -rf $(BUILDDIR)/* - -html: - $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 
- -dirhtml: - $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml - @echo - @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." - -singlehtml: - $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml - @echo - @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." - -pickle: - $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle - @echo - @echo "Build finished; now you can process the pickle files." - -json: - $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json - @echo - @echo "Build finished; now you can process the JSON files." - -htmlhelp: - $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp - @echo - @echo "Build finished; now you can run HTML Help Workshop with the" \ - ".hhp project file in $(BUILDDIR)/htmlhelp." - -qthelp: - $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp - @echo - @echo "Build finished; now you can run "qcollectiongenerator" with the" \ - ".qhcp project file in $(BUILDDIR)/qthelp, like this:" - @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/ToFu.qhcp" - @echo "To view the help file:" - @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/ToFu.qhc" - -devhelp: - $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp - @echo - @echo "Build finished." - @echo "To view the help file:" - @echo "# mkdir -p $$HOME/.local/share/devhelp/ToFu" - @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/ToFu" - @echo "# devhelp" - -epub: - $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub - @echo - @echo "Build finished. The epub file is in $(BUILDDIR)/epub." - -latex: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo - @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." - @echo "Run \`make' in that directory to run these through (pdf)latex" \ - "(use \`make latexpdf' here to do that automatically)." - -latexpdf: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through pdflatex..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -latexpdfja: - $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex - @echo "Running LaTeX files through platex and dvipdfmx..." - $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja - @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." - -text: - $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text - @echo - @echo "Build finished. The text files are in $(BUILDDIR)/text." - -man: - $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man - @echo - @echo "Build finished. The manual pages are in $(BUILDDIR)/man." - -texinfo: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo - @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." - @echo "Run \`make' in that directory to run these through makeinfo" \ - "(use \`make info' here to do that automatically)." - -info: - $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo - @echo "Running Texinfo files through makeinfo..." - make -C $(BUILDDIR)/texinfo info - @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." - -gettext: - $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale - @echo - @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." - -changes: - $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes - @echo - @echo "The overview file is in $(BUILDDIR)/changes." 
- -linkcheck: - $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck - @echo - @echo "Link check complete; look for any errors in the above output " \ - "or in $(BUILDDIR)/linkcheck/output.txt." - -doctest: - $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest - @echo "Testing of doctests in the sources finished, look at the " \ - "results in $(BUILDDIR)/doctest/output.txt." - -xml: - $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml - @echo - @echo "Build finished. The XML files are in $(BUILDDIR)/xml." - -pseudoxml: - $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml - @echo - @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." diff --git a/_Old_doc/build/doctrees/ToFu_Geom.doctree b/_Old_doc/build/doctrees/ToFu_Geom.doctree deleted file mode 100644 index bf88fdd12..000000000 Binary files a/_Old_doc/build/doctrees/ToFu_Geom.doctree and /dev/null differ diff --git a/_Old_doc/build/doctrees/ToFu_Inv.doctree b/_Old_doc/build/doctrees/ToFu_Inv.doctree deleted file mode 100644 index 92c1df82b..000000000 Binary files a/_Old_doc/build/doctrees/ToFu_Inv.doctree and /dev/null differ diff --git a/_Old_doc/build/doctrees/ToFu_MatComp.doctree b/_Old_doc/build/doctrees/ToFu_MatComp.doctree deleted file mode 100644 index 41455f351..000000000 Binary files a/_Old_doc/build/doctrees/ToFu_MatComp.doctree and /dev/null differ diff --git a/_Old_doc/build/doctrees/ToFu_Mesh.doctree b/_Old_doc/build/doctrees/ToFu_Mesh.doctree deleted file mode 100644 index 08d926e8a..000000000 Binary files a/_Old_doc/build/doctrees/ToFu_Mesh.doctree and /dev/null differ diff --git a/_Old_doc/build/doctrees/environment.pickle b/_Old_doc/build/doctrees/environment.pickle deleted file mode 100644 index f3aacd96f..000000000 Binary files a/_Old_doc/build/doctrees/environment.pickle and /dev/null differ diff --git a/_Old_doc/build/doctrees/index.doctree b/_Old_doc/build/doctrees/index.doctree deleted file mode 100644 index 99546088f..000000000 Binary files a/_Old_doc/build/doctrees/index.doctree and /dev/null differ diff --git a/_Old_doc/build/doctrees/overview.doctree b/_Old_doc/build/doctrees/overview.doctree deleted file mode 100644 index 1ddb91597..000000000 Binary files a/_Old_doc/build/doctrees/overview.doctree and /dev/null differ diff --git a/_Old_doc/build/html/.buildinfo b/_Old_doc/build/html/.buildinfo deleted file mode 100644 index 4dc11b914..000000000 --- a/_Old_doc/build/html/.buildinfo +++ /dev/null @@ -1,4 +0,0 @@ -# Sphinx build info version 1 -# This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. -config: 0e24ef1ed19643c3b9d916e2ce6c47ea -tags: fbb0d17656682115ca4d033fb2f83ba1 diff --git a/_Old_doc/build/html/ToFu_Geom.html b/_Old_doc/build/html/ToFu_Geom.html deleted file mode 100644 index 18aa91f28..000000000 --- a/_Old_doc/build/html/ToFu_Geom.html +++ /dev/null @@ -1,941 +0,0 @@ - - - - - - - - - - ToFu_Geom — ToFu alpha documentation - - - - - - - - - - - - - - - - -

ToFu_Geom


(This project is not finalised yet, work in progress...)


ToFu_Geom is the first ToFu-specific module; it is dedicated to handling the 3D geometry of the diagnostic of interest. It defines 6 object classes and many functions used as object methods. It relies on a module called General_Geom, which is not ToFu-specific (i.e.: it mostly contains functions and has no reference to ToFu objects) and which should eventually be entirely re-written using Cython for faster computation. Like all the other ToFu-specific modules, ToFu_Geom not only defines computing methods but also a variety of plotting methods that can be used to visualise various aspects and characteristics of the diagnostics, as well as for debugging. This section will first give a general presentation of the ToFu_Geom module and will then give a tutorial for building your own diagnostic.


ToFu is designed for handling passive radiation detectors (e.g.: bolometer foils, semi-conductor diodes or gas detectors), which can be placed behind an arbitrary number of collimating apertures of any shape and orientation. The same goes for the detector, represented by its active surface (the only constraint for apertures and detectors - in the current version - is that each must be represented by a planar polygon, but they do not have to be co-planar). Each detector is thus associated with a list of apertures through which it “sees” a certain volume. The volume of interest is limited, in the case of a Tokamak, to a chamber (i.e.: the vacuum vessel) represented in ToFu by a toroid, itself defined by a reference 2D polygon (usually the best possible representation of the inner walls of the Tokamak) which is then expanded toroidally. The volume “seen” by each detector is then the fraction of the toroid that it can “see” directly through its various apertures. On most fusion devices, such passive radiation detectors are located in a poloidal cross-section and arranged so that their cone of vision is very thin, such that it can be represented by a simple line (called a Line Of Sight, or LOS) and an etendue. ToFu_Geom allows for a full 3D description of the whole system, and also for an accurate computation of the geometrically optimal LOS and its associated etendue value. Hence, it is possible to do everything with the two approaches (full 3D or LOS) and quantify the error due to the LOS approximation, if any.


This short introduction gives the key points addressed by ToFu_Geom, which can be summarized by listing the 7 object classes and their meaning:

The object classes in ToFu_Geom

Name | Description | Inputs needed
ID | An identity object used by all ToFu objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...) | By default only a name (a character string) is necessary; a default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attributes can be added to suit the specific needs of the data acquisition system of each fusion experiment or the naming conventions of each laboratory
Tor | The limits of the toroidal chamber | A 2D polygon in (R,Z) coordinates
LOS | A LOS, which can be defined by the user for tests, but is usually defined by the Detect object as an output | A Tor object, a starting point and a unitary vector indicating the direction of observation (the end point is computed), both in 3D (X,Y,Z) coordinates
GLOS | A group of LOS objects, with a name (useful for defining cameras, which are sets of detectors with a common aperture and a common name) | A list of LOS objects
Aperture | An aperture, represented by a planar polygon | A Tor object and a planar polygon in 3D (X,Y,Z) coordinates
Detect | A detector, represented by its planar active surface; computes a geometrically optimal LOS as an output | A Tor object, a planar polygon in 3D (X,Y,Z) coordinates, and a list of Aperture objects
GDetect | A group of Detect objects, useful for defining cameras | A list of Detect objects

The following will give a more detailed description of each object and its attributes and methods through a tutorial, at the end of which you should be able to create your own diagnostic and access its main geometrical characteristics (they will be computed automatically).


Getting started with ToFu_Geom


Once you have downloaded the whole ToFu package (and made sure you also have scipy, numpy and matplotlib, as well as a free polygon-handling library called Polygon, which can be downloaded at http://www.j-raedler.de/projects/polygon/), just start a python interpreter and import ToFu_Geom (we will always import ToFu modules ‘as’ a short name to keep track of the functionalities of each module). To handle the local paths of your computer, we will also import the small module called ToFu_PathFile; matplotlib and numpy will also be useful:

import numpy as np
import matplotlib.pyplot as plt
import ToFu_Geom as TFG
import ToFu_PathFile as TFPF
import os
import cPickle as pck # for saving objects

The os module is used for exploring directories and the cPickle module for saving and loading objects.
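
For instance, any ToFu object can be written to disk and read back with cPickle. A minimal sketch, assuming an already created ToFu object obj (the file name below is purely illustrative, not a path imposed by ToFu):

# Saving an arbitrary ToFu object 'obj' with cPickle (illustrative file name)
with open('./MyToFuObject.pck', 'wb') as output:
    pck.dump(obj, output)

# Loading it back later
with open('./MyToFuObject.pck', 'rb') as input:
    obj = pck.load(input)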


The Tor object class


To define the volume of the vacuum chamber, you need to know the (R,Z) coordinates of its reference polygon (in a poloidal cross-section). You should provide it as a (2,N) numpy array, where N is the number of points defining the polygon. To give the Tor object its own identity, you should at least choose a name (i.e.: a character string). For more elaborate identification, you can define an ID object and give it as an input instead of a simple name. You can also provide the position of a “center” of the poloidal cross-section (in 2D (R,Z) coordinates, as a (2,1) numpy array) that will be used to compute the coordinates in projection space of any LOS using this Tor object (and the sinogram of any scalar emissivity field using this Tor object). If not provided, the center of mass of the reference polygon is used as a default “center”.


In the following, we will use the geometry of ASDEX Upgrade as an example. We first have to give a reference polygon (‘PolyRef’ below) as a (2,N) numpy array in (R,Z) coordinates.

theta = np.linspace(0,2*np.pi,100)
Rcoo = 1.5 + 0.75*np.cos(theta)
Zcoo = 0.75*np.sin(theta)
PolyRef = np.array([Rcoo,Zcoo])
Tor1 = TFG.Tor('Tor_Example', PolyRef)
print Tor1

Alternatively, you can store PolyRef in a file saved locally, or use one of the default tokamak geometries stored in the ToFu database, where Tor input polygons are stored in 2-line .txt files (space-separated values of the R coordinates on the first line, and the corresponding Z coordinates on the second line). Here, we use the default ASDEX Upgrade reference polygon stored in AUG_Tor.txt.

RP = TFPF.Find_Rootpath()
PathFile = RP + '/Inputs/AUG_Tor.txt'
PolyRef = np.loadtxt(PathFile, dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2)
Tor2 = TFG.Tor('AUG',PolyRef)
print Tor2

We have now created two Tor objects, and ToFu_Geom has computed a series of geometrical characteristics that will be useful later (or that simply provide general information). In particular, we have access to the following attributes:

The attributes of a Tor object

Attribute | Description
self.ID | The ID class of the Tor object
self.Poly | The reference polygon used to create the Tor object, as a (2,N) numpy array, where N is the number of points (the last one being identical to the first one)
self.BaryP | The barycenter of self.Poly
self.Surf | The surface of self.Poly
self.BaryS | The center of mass of self.Poly
self.Vect | The 2D vectors representing the edges of self.Poly, as a (2,N) numpy array
self.Vin | The normalised 2D vectors oriented towards the inside of self.Poly, one for each edge
self.PRMin, self.PRMax | The points of self.Poly with the minimum (resp. maximum) R coordinate, as a (2,1) numpy array (one for PRMin, one for PRMax)
self.PZMin, self.PZMax | The points of self.Poly with the minimum (resp. maximum) Z coordinate, as a (2,1) numpy array (one for PZMin, one for PZMax)
self.ImpRZ | The (R,Z) coordinates of the point used for computing the impact factor (i.e. the coordinates in projection space) of the LOS objects using this Tor and of the envelope of this Tor (default is self.BaryS)
self.Imp_EnvTheta | The discretized values of theta used for computing the envelope of Tor in projection space (where theta is in [0,pi])
self.Imp_EnvMinMax | The envelope of Tor in projection space (i.e.: the algebraic minimum and maximum impact factor of the reference polygon for each value of self.Imp_EnvTheta)
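
These attributes can be read directly on the Tor2 object created above. A minimal sketch (the attribute names are those listed in the table, assumed to be exposed as plain Python attributes):

print Tor2.Poly.shape          # (2,N) reference polygon
print Tor2.Surf, Tor2.BaryS    # surface and center of mass of the reference polygon
print Tor2.PRMin, Tor2.PRMax   # points of minimum / maximum R coordinate
print Tor2.ImpRZ               # reference point used for projection-space coordinates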

In addition to these attributes, the Tor object has a number of built-in methods that can be used to visualise its characteristics. As in the whole ToFu package, the object methods used for plotting always begin with “self.plot...”, where the name of the method after “plot...” is relatively explicit. All the plotting methods are based on matplotlib, and in order to allow for flexibility and customization, you can either pass as input an already existing matplotlib axes on which to plot, or use a predefined default axes (simply by not specifying any axes). Similarly, extensive use of keyword arguments with default values is made, so all plotting options are customizable: you can pass a dictionary for each element to be plotted (see the detailed documentation of each method to know which kwarg to use for which element).


As an example, you can plot the reference polygon of ASDEX Upgrade in both a poloidal and a toroidal projection, using the default axes (defined in ToFu_Geom):

axP, axT = Tor2.plot_AllProj(Elt='PI')
#plt.show()

Figure: Poloidal and Toroidal projections of the reference polygon of ASDEX Upgrade

Here we used the keyword argument ‘Elt’ to specify which elements we wanted to plot. We provided a string in which each letter is a code for an element. Here ‘P’ stands for the reference polygon and ‘I’ for the point used for computing the impact parameter of the envelope. We can then re-use the axes of the poloidal projection to plot the vectors defining the edges and the inner side of the reference polygon:

axP = Tor2.plot_PolProj_Vect(ax=axP)
#plt.show()

(for some mysterious reason it is not working on my Linux station, but it does work on my macbook, as it should)

Figure: Vector representation of the reference polygon of ASDEX Upgrade

We can also plot a 3D representation of the reference polygon, and specify that we only want to plot a fraction of it, between pi/4 and 7pi/4:

ax3 = Tor2.plot_3D_plt(thetaLim=(np.pi/4.,7.*np.pi/4.))
#plt.show()

Figure: 3D representation of the reference polygon of ASDEX Upgrade

We can also visualise the envelope of ASDEX Upgrade in projection space, in 2D or 3D, with a color of our choosing:

axImp = Tor2.plot_Impact_PolProj()
axImp3 = Tor2.plot_Impact_3D()
#plt.show()

Figure: Representation in projection space of the reference polygon of ASDEX Upgrade

Figure: Representation in projection space of the reference polygon of ASDEX Upgrade in 3D; this representation may be useful when there are LOS which are not contained in a poloidal cross-section, as we will see later

Feel free to explore the various keyword arguments of each method. This Tor object can then be used as a limit to the volume that can be detected by each LOS or Detect object.


The LOS object class


Since most tomography users in the fusion community are familiar with the LOS approximation (which gives satisfactory results in most usual situations), we chose to provide in ToFu the two extremes of the spectrum: a pure LOS approximation, and a full 3D approach. Any attempt to compute the geometry matrix with an “advanced” or “improved” LOS approximation (i.e.: taking into account finite beam width, using anti-aliasing techniques with pixels...) can be considered to fall somewhere between these two extremes, and since every user has his own recipes, we do not provide any except the two extreme approaches. Obviously, all users can download ToFu and add their own recipes in their local version (this should be done in the ToFu_MatComp module). Hence, a pure LOS object exists in ToFu, and can be defined with minimum knowledge of the diagnostics: only a point (D) and a unitary vector (u) are necessary for each LOS. The unitary vector should point in the direction of observation (i.e.: towards the interior of the vacuum chamber). Once a LOS is defined, ToFu automatically computes a series of points of interest. Indeed, if a Tor object is provided to the LOS object, we can determine the first point of entry into the Tor volume (PIn), and the point where the LOS gets out of it (POut). We can also determine the point on the LOS with minimum R-coordinate (PRMin, which is usually PIn or POut except when the LOS has a strong toroidal inclination, in which case PRMin is somewhere in the middle of the LOS). If the LOS object has an ImpRZ reference point (by default the ImpRZ of the associated Tor object), then the impact parameter of the LOS with respect to this point can be computed (as well as its two associated angles), and the LOS can be represented in projection space.

-

Hence, a LOS object has the following attributes :

The attributes of a LOS object:

self.ID : The ID class of the object
self.D : The starting point of the LOS in 3D (X,Y,Z) coordinates (usually the center of mass of the associated detector, or the center of mass of the collimating slit for fan-like cameras)
self.u : The unitary direction vector in 3D (X,Y,Z) coordinates (oriented towards the interior of the associated Tor)
self.Tor : The associated Tor object
self.PIn, self.POut, self.PRMin : The particular points, in 3D (X,Y,Z) coordinates, on the LOS associated to the Tor envelope: the point at which the LOS enters the volume, the point at which it exits it, and the point of minimum R-coordinate
self.kPIn, self.kPOut, self.kPRMin : The lengths along the LOS (from self.D) of self.PIn, self.POut and self.PRMin
self.RMin : The value of the R-coordinate of self.PRMin
self.PolProjAng : An estimate of the angle by which the LOS is distorted in its poloidal projection. A value of 0 means the LOS already lies in a poloidal cross-section and remains a straight line.
self.PplotOut, self.PplotIn : The points along the LOS used to plot its poloidal projection, either the whole LOS (self.PplotOut, from self.D to self.POut) or only the part which is inside the Tor volume (self.PplotIn, from self.PIn to self.POut)
self.ImpRZ : The 2D (R,Z) coordinates used to compute the impact parameter of the LOS (i.e. its coordinates in projection space); by default self.ImpRZ = self.Tor.ImpRZ
self.ImpP, self.ImpPk, self.ImpPr, self.ImpPTheta : self.ImpP is the point on the LOS which is closest to self.ImpRZ (the "impact point"), self.ImpPk is its distance from self.D ((self.ImpP-self.D).(self.u)), self.ImpPr is its small (geometric) radius from self.ImpRZ, and self.ImpPTheta is its toroidal angle
self.Impp, self.Imptheta, self.Impphi : The coordinates of the LOS in projection space, where self.Impp is the (positive or negative) impact parameter, self.Imptheta is the projection angle in a poloidal cross-section and self.Impphi is the deviation angle from the poloidal cross-section (the reference poloidal cross-section being the one which includes self.ImpP)
-

Defining a LOS object only requires a start point and a unitary vector indicating the viewing direction (both in 3D (X,Y,Z) coordinates), as well as an associated Tor object. As an example, we can define Los, a LOS object, as follows:

-
D = np.array([2.,-2.,1.]).reshape((3,1))    # Defining D (starting point)
-uu = np.array([-0.2,1.,-0.8])               # Defining uu (vector for direction of LOS)
-uu = (uu/np.linalg.norm(uu)).reshape((3,1)) # Normalising uu
-Los = TFG.LOS('LOS 1',D,uu,T=Tor2)          # Creating a LOS object using a pre-defined Tor object (kwdarg 'T' stands for Tor)
-print Los
-
-
-

Note that if you define a LOS object that does not intersect the Tor volume, you will get an error message telling you that the code could not find a PIn or POut point (both are necessary). All the geometric characteristics of Los have now been computed (the coordinates in projection space have been computed using the center of mass of the reference polygon of Tor2 as default, but they can be re-computed with another reference point, as we will see later). The built-in routines can be used to visualise Los, and we specify, thanks to the keyword argument 'Elt', that we not only want to see the LOS itself but also the position of the particular points that were computed or that were used for its definition (self.D, self.PIn, self.POut and self.PRMin => DIOR). In order to better visualise it, we plot it both in poloidal and toroidal projections, re-using a set of axes on which we first plot Tor2:

-
axP, axT = Tor2.plot_AllProj()
-axP, axT = Los.plot_AllProj(Elt='LDIORr',EltTor='PI')
-#plt.show()
-
-
-

Poloidal and toroidal projection of a user-defined LOS object, with points of interest and parent Tor object

-
-

We used again here the 'Elt' keyword argument to specify that we want to plot the LOS itself ('L'), the particular points self.D ('D'), self.PIn ('I'), self.POut ('O'), self.PRMin ('R') and self.ImpP ('r'). In fact, since the LOS object has a Tor attribute, the possibility of plotting the Tor object at the same time as the LOS object is provided in the same method, through the kwdarg 'EltTor' (just provide the same letters as for a Tor object).

-

Like for the Tor object, the LOS object can also be plotted in 3D using the plot_3D_plt() method:

-
ax3 = Los.plot_3D_plt(Elt='LDIORr',EltTor='PI',thetaLim=(0.,7.*np.pi/4.),MdictR={'c':'b','marker':'o','ls':'None'},Mdictr={'c':'r','marker':'+','ls':'None'})
-#plt.show()
-
-
-

3D plot of a user-defined LOS object, with points of interest and parent Tor object

-
-

Here we have plotted the associated Tor object using the kwdarg 'EltTor' and changed the dictionaries for the self.PRMin and self.ImpP points. Generally, however, matplotlib is not the best library for 3D plots with several objects; hence, mayavi is currently being considered for implementation since it is much better adapted to this particular task.

-

Also, the coordinates of Los in projection space can be plotted on the same graph on which the envelope of Tor2 was plotted, in 2D or 3D (3D being relevant to take into account the fact that Los does not lie in a poloidal cross-section). Beware that these coordinates depend on the reference point chosen. To illustrate this, we compute the impact parameter of Los with the default reference point (i.e.: the center of mass of its associated Tor object) in blue and with a different, arbitrary, reference point in red:

-
mdict = {'ls':'None','marker':'o','c':'b'}
-axImp = Los.plot_Impact_PolProj(ax=axImp,Mdict=mdict)   # Plot coordinates (impact parameter and angles) with respect to Los.Tor.ImpRZ = Tor2.ImpRZ (=center of mass of Tor2 by default)
-RefP2 = np.array([1.5,-0.05]).reshape((2,1))
-Los.set_Impact(RefP2)                                   # Compute the new coordinates (impact parameter and angles) with respect to RefP2
-mdict = {'ls':'None','marker':'o','c':'r'}
-axImp = Los.plot_Impact_PolProj(ax=axImp,Mdict=mdict)   # Plot new coordinates (impact parameter and angles) with respect to RefP2
-#plt.show()
-
-
-

Coordinates in projection space of Los, with respect to two different reference points

-
-

N.B.: In fact the envelope of Tor2 also changes when we change the reference point, but only the first envelope is displayed here for clarity. Now we know how to build a LOS object, get its main geometrical characteristics and plot it. But most tomography diagnostics rely on tens or hundreds of different LOS which, in the fusion community, are usually grouped in what is called "cameras". A "camera" is typically a set of several detectors which share a common aperture in a fan-like arrangement, which is a good compromise between room saving (access is scarce around tokamaks) and good geometrical coverage. Hence, a LOS can be defined for each detector as the line that runs through its center of mass and through the center of mass of its associated aperture. The fan-like arrangement means that all LOS belonging to the same camera will cross at their common aperture. To this purely geometrical consideration, one must add the data acquisition system, which often treats data from a camera as a group of signals identified by a common nomenclature. For these reasons, it is useful to define an object embodying the notion of "camera", which is simply done by the GLOS object class.

-

ToDo: add visualisation options for the reference points and LOS.ImpP in physical space (add kwdarg in existing functions)

-
-
-

The GLOS object class

-

The GLOS object class (where GLOS stands for Group of Lines Of Sight) is simply a list of LOS objects with a common ID class (i.e.: a common name and other identity features). It is useful for fast and easy handling of a large number of LOS.

The attributes of a GLOS object:

self.ID : The ID class of the object
self.LLOS, self.nLOS : The list of LOS objects contained in this group, and the number of LOS (self.nLOS = len(self.LLOS))
self.Tor : The Tor object common to all LOS of self.LLOS
-

The methods of a GLOS object can be separated into two categories. First, all the LOS object methods are reproduced in such a way as to handle all the LOS contained in the GLOS (for example with "for" loops). These include in particular the plotting methods. Second, some methods are provided to facilitate the selection of sub-sets of LOS in the GLOS object and handle them. For example, we can create two cameras of respectively 10 and 15 LOS:

-
P1, P2 = np.array([2.,-2.,1.]).reshape((3,1)), np.array([0.5,-0.5,-0.5]).reshape((3,1))    # Creating the common points
-n1, n2 = 10, 15                                                                         # Reminding the number of LOS in each camera
-phi1, phi2 = np.pi/7., np.pi/4.
-theta1, theta2 = np.linspace(np.pi/8.,np.pi/4.,n1), np.linspace(5.*np.pi/6.,6.5*np.pi/6.,n2)
-uu1 = np.array([-np.sin(phi1)*np.cos(theta1),np.cos(phi1)*np.cos(theta1),-np.sin(theta1)])  # Creating the unitary vectors
-uu2 = np.array([-np.sin(phi2)*np.cos(theta2),np.cos(phi2)*np.cos(theta2),-np.sin(theta2)])
-LLos1 = [TFG.LOS("Los1"+str(ii),P1,uu1[:,ii:ii+1],T=Tor2) for ii in range(0,n1)]          # Creating the lists of LOS objects
-LLos2 = [TFG.LOS("Los2"+str(ii),P2,uu2[:,ii:ii+1],T=Tor2) for ii in range(0,n2)]
-GLos1, GLos2 = TFG.GLOS("Cam1",LLos1), TFG.GLOS("Cam2",LLos2)                           # Creating the GLOS objects
-print GLos1, GLos2
-
-
-

We can then plot their poloidal and toroidal projections (without the particular points), on top of the reference polygon of Tor2:

-
axP, axT = GLos1.plot_AllProj(Ldict={'c':'b'},Elt='L',EltTor='PI',Lplot='In')
-axP, axT = GLos2.plot_AllProj(axP=axP,axT=axT,Ldict={'c':'r'},Elt='L')
-#plt.show()
-
-
-
Poloidal and toroidal projections of two arbitrary cameras, with different plotting options

-
-

Notice here that we used the keyword argument "Lplot" to specify that the LOS of the first camera should only be plotted inside the Tor volume (i.e.: from PIn to POut), whereas the default is to plot the entire LOS (Lplot='Tot', which plots the LOS from D to POut). We also used the "Ldict" kwdarg to specify a dictionary for the plotting command.

-

Like the LOS objects, a GLOS object enables you to plot the coordinates in projection space of all the LOS it contains (in 2D or 3D):

-
axImp = Tor2.plot_Impact_PolProj()
-axImp = GLos1.plot_Impact_PolProj(ax=axImp,Mdict={'ls':'None','marker':'o','c':'b'})
-axImp = GLos2.plot_Impact_PolProj(ax=axImp,Mdict={'ls':'None','marker':'x','c':'r'})
-plt.show()
-
-
-

Coordinates in projection space of the two cameras, with the Tor enveloppe

-
-

In addition to these methods which echo the methods of the LOS class, the GLOS class offers tools to select subsets of the list of LOS from which it was created. This is where the relevance of the ID class starts to show: indeed, besides the Name that you gave to your objects, you might want to store data which is both specific to these objects and to your needs or to the naming conventions of your laboratory. For example, you might want to enter the signal code associated to each detector, or the age of each detector (to have an idea of the effect of ageing on its performance)... Hence, when you create an object - like a LOS for example - you can add extra attributes to its ID class. These attributes are anything that you consider helpful to identify / discriminate a particular object. In the following example, we re-use the camera created above and add the signal code ("Signal" + number of the LOS) and the age (between 0 and 3 years) of each detector to the ID class of its LOS:

-
for ii in range(0,n1):
-    GLos1.LLOS[ii].Id.Code = "Signal"+str(ii*10)
-    GLos1.LLOS[ii].Id.Age = 3.*ii/n1
-
-
-

We can now ask the GLOS object to give us a list of its LOS that match a criterion of our choosing. There are two methods to do this. They take the same arguments, but the first one will return a numpy array of boolean indices (for later use if you need it), while the second one directly returns a list of LOS objects (and uses the first one). For example, we use the first one to get the indices of LOS with a signal code equal to "Signal0" or "Signal50", and the second one to get a list of LOS aged less than a year:

-
ind = GLos1.get_ind_LOS('Code',"in ['Signal0','Signal50']")
-subLLos = GLos1.pick_LOS('Age','<=1.')
-print ind, subLLos
-
-
-

The flexibility is provided through the use of eval(), which allows for string expressions. These methods are intended to provide the necessary flexibility for quick adaptation to your specific needs; depending on user feedback, they may also evolve or be developed further. Alternatively, you can also build a list of the attributes you are interested in and then use the standard list methods to get the indices you want:

-
LAttr = [los.Id.Code for los in GLos1.LLOS]
-ind = [LAttr.index('Signal0'), LAttr.index('Signal50')]
-print ind
-
-
-

As was already said, ToFu provides you with the possibility of defining and using LOS if you wish; however, its main features reside in the 3D description of the diagnostic, of which the LOS description is just an approximation (which can be sufficient for your needs, depending on the geometry of your system, on the physics at play and on the accuracy that you want to achieve). In the following, we introduce the Detect class, which is used to handle the 3D geometry of the problem. Once a Detect object is created, it can be associated to several Aperture objects to determine its 3D field of view. Consequently, the geometrically optimal LOS can also be computed and the associated LOS object can be easily produced on demand; we therefore generally advise creating Detect objects directly, of which LOS objects can be seen as a by-product.

-
-
-

The Detect and Apert object classes

-

In addition to what has been said above, creating a Detect object directly instead of a LOS object will provide you with the ability to compute an accurate value of the etendue associated to the LOS approximation (link to definition of etendue and why it is important for proper use of the LOS approximation). In its current version, ToFu handles apertures as 3D planar polygons which, to this day, have the following attributes:

The attributes of an Apert object:

self.ID : The ID class of the object
self.Poly, self.PolyN : A (3,N) numpy array representing a planar polygon in 3D (X,Y,Z) coordinates, and the number of points that this polygon is comprised of
self.BaryP, self.BaryS, self.S, self.nIn : The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume
self.Tor : The Tor object associated to the Apert object
-

The Apert object is mainly used as a computing intermediate for the Detect object. However, it does come along with some key plotting methods aimed at giving you an idea of its geometry in the usual projections (poloidal and toroidal) and in 3D.

-
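For instance, a minimal sketch of such a plot (the exact method name and 'Elt' codes for an Apert object are assumptions based on the conventions used by the other ToFu_Geom objects in this tutorial; Ap1 refers to one of the apertures created further down):

# Hypothetical sketch: plotting an Apert object on its own, assuming it exposes
# plot_AllProj() with 'P' (polygon) and 'V' (perpendicular vector) element codes,
# like the other ToFu_Geom objects (not verified against the actual API).
axP, axT = Ap1.plot_AllProj(Elt='PV')
#plt.show()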

Similarly, ToFu handles detectors as 3D planar polygons (i.e.: the polygon embodying the active surface of the detector) which, to this day, have the following attributes:

The attributes of a Detect object:

self.ID : The ID class of the object
self.Poly, self.PolyN : A (3,N) numpy array representing a planar polygon in 3D (X,Y,Z) coordinates, and the number of points that this polygon is comprised of
self.BaryP, self.BaryS, self.S, self.nIn : The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume
self.Tor : The Tor object associated to the Detect object
self.LApert : A list of Apert objects associated to the Detect object
self.LOS : A LOS object corresponding to the geometrically optimal LOS going through self.BaryS and through the center of mass of the intersection of all the associated Apert objects. Its value is 'Impossible' if the geometry you built does not allow for the existence of a LOS (i.e.: if the volume inside Tor viewed by the detector through its apertures is zero).
self.LOS_TorAngRef : The value of the toroidal angle corresponding to the position of the middle of the LOS (between self.LOS.PIn and self.LOS.POut), used by plotting methods; it can be different from the toroidal angle of the detector if the LOS does not lie in a poloidal cross-section
self.LOS_Etend_0Approx, self.LOS_Etend_0ApproxRev, self.LOS_Etend_PerpSamp, self.LOS_Etend_Perp, self.LOS_Etend_RelErr : Values of the etendue, computed respectively using a fast 0th order approximation, a reversed 0th order approximation, a sampled integral in a plane perpendicular to the LOS, and an adaptive integral in a plane perpendicular to the LOS. The last attribute is the relative error tolerance used for the adaptive computation of the integral (default is 0.01 %).
self.Span_R, self.Span_Theta, self.Span_Z, self.Span_k : The tuples indicating the min and max values of the cylindrical (R,Theta,Z) coordinates inside which the viewing cone of the Detect object can be found. These limits define a box inside which the viewing cone is found; they do not give the viewing cone itself. The Span_k attribute corresponds to the span of the component along self.LOS.u that can be reached inside the viewing cone (estimated by sampling the viewing cone into more than 1000 LOS - the exact number depends on self.Poly and on the shapes of the apertures and can be tuned by parameters).
self.Span_NEdge, self.Span_NRad : The parameters that were used for computing the span in cylindrical coordinates of the system. The first one quantifies the number of extra points added on the polygon edges, and the second one the number of extra points added in the radial direction.
self.Cone_PolyPol, self.Cone_PolyTor : The poloidal and toroidal projections of the 3D viewing cone of the {detector+apertures} system. These projected polygons are useful for visualising the detected volume (or rather its projections) and for fast discrimination of points which are inside / outside of this detected volume (i.e.: fast computation of the integrated signal)
-

Now we are going to create two arbitrary Apert objects and one Detect object to show how it is done and what information it gives access to. As already mentioned, the various Apert objects associated to a Detect object must be planar polygons, but they do not need to be coplanar, and they can have any arbitrary shape, hence:

-
d1, d2, d3 = 0.02, 0.02, 0.02                                                                                                       # Characteristic size of each polygon
-C1, C2, C3 = np.array([1.56,-1.49,0.75]), np.array([1.52,-1.38,0.70]), np.array([1.60,-1.60,0.80])                                  # Creating the centers around which the polygons will be built
-C1, C2, C3 = C1.reshape((3,1)), C2.reshape((3,1)), C3.reshape((3,1))
-n1, n2, n3 = np.array([0.1,-2.,0.5]), np.array([1.,-1.,0.8]), np.array([1.,-1.,0.])                                                 # Creating the vectors of their respective planes
-n1, n2, n3 = n1/np.linalg.norm(n1), n2/np.linalg.norm(n2), n3/np.linalg.norm(n3)
-e11, e21, e31 = np.cross(n1,np.array([0.,0.,1.])), np.cross(n2,np.array([0.,0.,1.])), np.cross(n3,np.array([0.,0.,1.]))
-e11, e21, e31 = e11/np.linalg.norm(e11), e21/np.linalg.norm(e21), e31/np.linalg.norm(e31)
-e12, e22, e32 = np.cross(n1,e11), np.cross(n2,e21), np.cross(n3,e31)                                                                # Building a local normalised base of vector
-Poly1 = d1*np.array([[-1, 1, 0.],[-1, -1, 1]])                                                                                      # The first polygon is a triangle
-Poly2 = d2*np.array([[-1, 1, 1.5, 0., -1.5],[-1, -1, 0., 1, 0.]])                                                                   # The second one is a pentagon
-Poly3 = d3*np.array([[-1., 1, 1, -1,],[-1., -1., 1., 1.]])                                                                          # The third is a rectangle
-Poly1 = np.dot(C1,np.ones((1,Poly1.shape[1]))) + np.dot(e11.reshape((3,1)),Poly1[0:1,:]) + np.dot(e12.reshape((3,1)),Poly1[1:2,:])
-Poly2 = np.dot(C2,np.ones((1,Poly2.shape[1]))) + np.dot(e21.reshape((3,1)),Poly2[0:1,:]) + np.dot(e22.reshape((3,1)),Poly2[1:2,:])
-Poly3 = np.dot(C3,np.ones((1,Poly3.shape[1]))) + np.dot(e31.reshape((3,1)),Poly3[0:1,:]) + np.dot(e32.reshape((3,1)),Poly3[1:2,:])
-
-Ap1, Ap2 = TFG.Apert('Apert1', Poly1, T=Tor2), TFG.Apert('Apert2', Poly2, T=Tor2)                                                   # Creating the two apertures, using Tor2
-D1 = TFG.Detect('Detect1', Poly3, T=Tor2, LApert=[Ap1,Ap2], CalcEtend=True, CalcCone=True)                                         # Creating the Detect, using Tor2 and the two apertures, calculation of the etendue
-print Ap1, Ap2, D1
-print D1.LOS, D1.LOS_Etend_Perp
-
-
-

Note that the computation may take some time (several seconds) due to the accurate computation of the etendue. If you do not need the etendue, you can avoid its computation using the kwdarg CalcEtend=False (default value is True). Once we have checked that the constructed geometry is relevant (i.e.: that it allows for a non-zero detected volume, which means that a LOS should exist), we can plot the associated Detect elements and LOS:

-
axP, axT = D1.plot_AllProj(Elt='PV', EltApert='PV', EltLOS='LDIORr', EltTor='PI')
-#plt.show()
-
-
-

Poloidal and toroidal projections of a Detect object with all its associated objects (2 Apert objects, a Tor object, and a subsequent LOS object)

-
-

As said earlier, the three polygons do not have to be coplanar, as is visible on the next figure on which we only plotted the two Apert objects and the Detect object (with their perpendicular vectors), as well as the start point of the LOS and its entry point into the Tor volume (in blue):

-
ax3 = D1.plot_3D_plt(Elt='PV', EltApert='PV', EltLOS='DI', EltTor='',MdictI={'c':'b','marker':'o','ls':'None'})
-#plt.show()
-
-
-

3D plot of an arbitrary Detect object with two non-coplanar Apert objects

-
-

Since the Detect object has a LOS object as an attribute, all the LOS methods are accessible via this LOS attribute, making it easy to plot the coordinates in projection space of the LOS of this particular Detect object.

-
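For instance, a minimal sketch re-using the plotting calls already shown in the LOS section (only the access through the self.LOS attribute is new here):

# Sketch: the LOS methods are reached through the LOS attribute of the Detect object
axImp = Tor2.plot_Impact_PolProj()                                        # Tor envelope in projection space
axImp = D1.LOS.plot_Impact_PolProj(ax=axImp, Mdict={'ls':'None','marker':'o','c':'g'})
#plt.show()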

We saw that the etendue is computed automatically when the Detect object is defined. This is done via numerical integration, on a plane perpendicular to the geometrically optimal LOS, of the solid angle subtended by the Detect and its Apert objects. ToFu_Geom thus has built-in routines to compute that solid angle from any point in the 3D Tor volume. This will also be useful to compute the total signal received by the detector from a given radiation field. Of course, when taken on a plane perpendicular to the geometrically optimal LOS, the solid angle decreases as we get to the edge of the viewing cone. You can visualise the solid angle on any plane perpendicular to the LOS simply by choosing its relative position on the LOS via the 'Ra' kwdarg of the following method (0 and 1 corresponding respectively to the PIn and POut points of the LOS):

-
ax = D1.plot_SAng_OnPlanePerp(Ra=0.5)
-#plt.show()
-
-
-

Surface plot of the solid angle subtended by the {Detector + Apertures} system as seen from points on a plane perpendicular to the system’s LOS and placed at mid-length of the LOS

-
-

The value in parentheses in the title is a ratio (here 1 %) used to make sure the plot includes the entirety of the viewing cone in this plane (i.e.: the plotting surface is 1 % larger than the estimated support of the viewing cone). This surface has no easily recognisable shape because it comes from a system consisting of 3 non-coplanar polygons with various shapes. If we had used a square detector with a coplanar square aperture, the square shape would have been visible on the iso-contours of the solid angle.

-

Similarly, it is possible to plot the evolution of the etendue (the solid angle integrated over the plane) as a function of the point's distance along the LOS (to check that it remains constant), using three different integration methods (two via discretisation and one via an adaptive algorithm). This may take a while because the etendue has to be computed N×3 times (3 times for each point):

-
ax = D1.plot_Etend_AlongLOS(NP=5, Colis=False, Modes=['simps','trapz','quad'], Ldict=[{'c':'k','ls':'dashed','lw':2},{'c':'b','ls':'dashed','lw':2},{'c':'r','ls':'dashed','lw':2}])
-#plt.show()
-
-
-

Etendue of the {Detector + Apertures} system as a function of the relative distance along the LOS (from 0 = self.LOS.PIn to 1 = self.LOS.POut), with three different integration methods using their defaults settings

-
-

We can see that the default settings used for each method are sufficient to give an accurate computation of the etendue that remains constant along the LOS, as it should.

-

Now, in order to explore the geometry of the system further, we can plot the value of the solid angle in any poloidal plane (respectively horizontal plane) intersecting the viewing cone, which lets us visualise the viewing cone itself:

-
axSAP, axNbP = D1.plot_PolSlice_SAng()
-axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP)
-axSAT, axNbT = D1.plot_TorSlice_SAng()
-axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT)
-#plt.show()
-
-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can “see” each point of the same poloidal slice (this will be useful for systems with several detectors)

-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can “see” each point of the same horizontal slice (this will be useful for systems with several detectors)

-
-

By default the poloidal slice is the plane which intersects the LOS at mid-length, but you can choose any toroidal angle by using the "Theta" kwdarg. Note that the above plots are poloidal (resp. horizontal) slices, not projections. In its current version, ToFu_Geom only allows plotting projections by computing the solid angle for several discrete slices (25 by default, plus particular slices including self.LOS.PIn, self.LOS.POut and the mid-length point) close to each other and plotting the maximum value for each point (computation is very long in the current, non-optimised, python-only version, typically 20-30 min for 10 slices):

-
axSAP, axNbP = D1.plot_PolProj_SAng()
-axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP)
-axSAT, axNbT = D1.plot_TorProj_SAng()
-axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT)
-#plt.show()
-
-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection (Right) Number of detectors that can “see” each point (this will be useful for systems with several detectors)

-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection (Right) Number of detectors that can “see” each point (this will be useful for systems with several detectors)

-
-

Notice that there is a collision-detection routine in the ray-tracing code that takes into account the fact that the viewing cone is limited by the Tor instance. This behaviour can be de-activated by using the "Colis" kwdarg (True by default), as illustrated in the following:

-
axSAP, axNbP = D1.plot_PolProj_SAng(Colis=False)
-axSAP, axNbP = D1.Tor.plot_PolProj(ax=axSAP), D1.Tor.plot_PolProj(ax=axNbP)
-axSAT, axNbT = D1.plot_TorProj_SAng(Colis=False)
-axSAT, axNbT = D1.Tor.plot_TorProj(ax=axSAT), D1.Tor.plot_TorProj(ax=axNbT)
-#plt.show()
-
-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection with de-activated collision detection (Right) Number of detectors that can “see” each point (this will be useful for systems with several detectors)

-
-

(Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection with de-activated collision detection (Right) Number of detectors that can “see” each point (this will be useful for systems with several detectors)

-
-

These plotting commands give a pretty good idea of the fraction of the Tor volume which is seen by the detector through its associated apertures. It is actually these functionalities (plotting poloidal and toroidal projections of the solid angle) that are used to extract the poloidal and toroidal projections of the viewing cone as two sets of 2D polygons (i.e.: the 0 iso-contours of the solid angle projections). These two projected polygons can be simply plotted by adding 'C' (like 'cone') to the "Elt" kwdarg of the plot_PolProj and plot_TorProj plotting methods:

-
axP, axT = D1.plot_AllProj(Elt='PVC', EltApert='PV', EltLOS='LDIORr', EltTor='PI')
-#plt.show()
-
-
-

Poloidal and toroidal projections of Detect elements, this time including the projected polygons of the viewing cone

-
-

If you want to use the LOS approximation, you have to make sure it is valid. This approximation relies on several assumptions, one of which is that the etendue must remain constant along the LOS. We confirmed this in our case when we plotted it. However, we did not take into account the fact that only a fraction of the viewing cone stops where the LOS stops, while the other fraction continues its way into the vacuum chamber. This means that there will be contributions to the signal which are not taken into account by the current LOS. An option could be to artificially extend the LOS through the central solenoid to the far end of the viewing cone, but this would still be insufficient since the etendue that should be used for this extended part of the LOS is lower than the one we computed for the first part of the LOS. This type of situation, in which a fraction of the viewing cone is obstructed, corresponds to cases in which the etendue is in fact not constant along the entirety of the extended LOS (i.e.: extended to the far end of the viewing cone), as illustrated below. It reveals the limits of the LOS approximation and the advantages of a 3D description of the geometry.

-
ax = D1.plot_Etend_AlongLOS(NP=14, Length='kMax', Colis=True, Modes=['simps'], PlotL='abs', Ldict=[{'c':'k','ls':'None','lw':2,'marker':'x','markersize':10}])
-ax = D1.plot_Etend_AlongLOS(ax=ax, NP=14, Length='kMax', Colis=False, Modes=['simps'], PlotL='abs', Ldict=[{'c':'r','ls':'None','lw':2,'marker':'o','markersize':10}])
-ax = D1.plot_Etend_AlongLOS(ax=ax, NP=6, Length='POut', Colis=True, Modes=['simps'], PlotL='abs', Ldict=[{'c':'b','ls':'None','lw':2,'marker':'+','markersize':10}])
-plt.show()
-
-
-

Etendue of the {Detector + Apertures} system as a function of the relative distance along the extended LOS, with and without taking into account collisions, and along the former LOS.

-
-

In addition to this effect, it is also possible to visualise the difference between the LOS approximation and the real viewing cone by plotting the contour of the viewed volume in projection space, as illustrated below:

-
axImp = D1.plot_Impact_PolProj(Elt='DLT')
-plt.show()
-
-
-
Representation in projection space of the viewing cone of the {Detector + Apertures} system, together with its LOS and the Tor envelope

-
-

The smaller the area delimited by the contour, the better the LOS approximation. We can clearly see here that the difference is significant. The approximation could nevertheless still be valid if the tomogram of the observed emissivity field were constant over this area (which is not the case in most standard situations).

-

More than visualisation or computation of the etendue, knowing the two projected polygons of the viewing cone is helpful for faster integration of the signal in a synthetic diagnostic approach. Indeed, we know that all points which are not in both projected polygons are necessarily outside of the viewing cone. Hence, they can be used for fast discrimination of points which are useless for the signal, as in the sketch below.

-
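To make the idea concrete, here is a minimal sketch of such a pre-filtering step using matplotlib's Path class; it assumes that self.Cone_PolyPol is available as a (2,N) numpy array of (R,Z) vertices, as suggested by the attribute table above (the exact storage format is an assumption):

from matplotlib.path import Path

# Sketch: discard candidate points lying outside the poloidal projection of the
# viewing cone; such points cannot contribute to the detected signal.
conepath = Path(D1.Cone_PolyPol.T)                          # assumed (2,N) array of (R,Z) vertices
Rpts = np.random.uniform(1.0, 2.2, 1000)                    # arbitrary candidate points in (R,Z)
Zpts = np.random.uniform(-1.0, 1.0, 1000)
keep = conepath.contains_points(np.array([Rpts, Zpts]).T)   # boolean mask
print keep.sum(), "points out of", keep.size, "may contribute to the signal"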

Hence, the total incoming power on the detector for a given spectrally-integrated 3D emissivity field (provided as an input function of position, taking a (3,N) numpy array of points) can be computed. As for the computation of the etendue, you can choose between three integration methods (via the "Mode" kwdarg), among which two discretisation methods and an adaptive algorithm (computation may be very long for a high-resolution discretisation with Colis=True). The following example uses a simple gaussian profile, constant in the toroidal direction:

-
def Emiss1(Points):
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Z = Points[2,:]
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-RR, ZZ = np.linspace(Tor2.PRMin[0], Tor2.PRMax[0],100), np.linspace(Tor2.PZMin[1], Tor2.PZMax[1],200)
-RRf, ZZf = np.ones((200,1))*RR, ZZ.reshape((200,1))*np.ones((1,100))
-Val = Emiss1(np.array([RRf.flatten()*np.cos(0), RRf.flatten()*np.sin(0), ZZf.flatten()]))
-ax = Tor2.plot_PolProj(Elt='P')
-Val[~Tor2.isinside(np.array([RRf.flatten(),ZZf.flatten()]))] = np.nan
-ax.contourf(RRf, ZZf, Val.reshape((200,100)),50)
-#plt.show()
-SigLOS1, SigLOS2 = D1.calc_Sig(Emiss1, Method='LOS', Mode='quad'), D1.calc_Sig(Emiss1, Method='LOS', Mode='simps')
-SigCol = D1.calc_Sig(Emiss1, Colis=True, Mode='simps')
-SigNoC = D1.calc_Sig(Emiss1, Colis=False, Mode='simps')
-print "Signals :", SigLOS1, SigLOS2, SigCol, SigNoC
-
-
-
'Signals : [  8.34905419e-08] [  6.33159209e-08]'
-
-
-

Fake (double gaussian) SXR emissivity field (toroidally invariant) which resembles one of the typical cases of ASDEX Upgrade

-
-

As one can expect, the signal is higher when collisions with the Tor boundary are not considered because of the contribution from the plasma volume which should be hidden behind the central solenoid.

-

This direct approach is the most accurate (provided sufficient discretisation of the integral) since it does not rely on a generic pre-defined spatial discretisation of the 3D emissivity on a mesh. Such a discretisation is nonetheless necessary for tomographic inversions and allows for much faster synthetic diagnostic computation, since the input emissivity function can be projected on so-called 'basis functions' with pre-computed contributions (via the so-called geometry matrix) to each detector. Spatial discretisation is addressed in the ToFu_Mesh module and the computation of the geometry matrix (both with a 3D and a LOS approach) is addressed in the ToFu_MatComp module.

-

But first, let us describe the last object class of ToFu_Geom, which is the equivalent of the GLOS object class for Detect objects.

-
-
-

The GDetect object class

-

The GDetect object class provides an easy way to handle groups of detectors which have some features in common, like the GLOS object class does for LOS objects. It is basically a list of Detect objects with a common name and adapted methods for easily computing and plotting the characteristics of all detectors it contains with a single-line command. It also comes with selection methods to extract a sub-set of its Detect objects.

The attributes of a GDetect object:

self.ID : The ID class of the object
self.LDetect, self.nDetect : A list of Detect objects, which should have the same Tor object, and the number of Detect objects it contains
self.BaryP, self.BaryS, self.S, self.nIn : The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume
self.Tor : The Tor object associated to the Detect objects
self.LApert : A list of Apert objects associated to the Detect objects
-

Naturally, the methods are similar to those of both the GLOS and Detect object classes. In the following, the GDetect object class is illustrated with the geometry of the F camera of the SXR diagnostic of ASDEX Upgrade. Once it is loaded as a ToFu_Geom GDetect instance, we can use the built-in methods to explore its characteristics, like the etendue of each detector it is comprised of:

-
pathfileext = './Objects/TFG_GDetect_AUG_SXR_F_D20141202_T230455.pck'
-with open(pathfileext, 'rb') as input:
-    F = pck.load(input)
-
-# Plot etendues
-ax = F.plot_Etendues()
-plt.show()
-
-
-
Etendues of the detectors composing the F camera of ASDEX Upgrade, computed using the usual 0-order approximation in both ways (direct and reverse), using a complete integration with an adaptive algorithm (with relative error < 0.01 %), and using a sampling algorithm.

-
-

We can also visualise the lines of sight and projected viewing cones of all the detectors. In the following example, we use the 'Elt'-type kwdargs to specify that we first want to plot the viewing cones and the polygons constituting the detectors ('CP'), with the polygons of the apertures ('P') and the reference polygon of the Tor ('P'), but no LOS (''). Then we plot the LOS ('L') but not the viewing cones.

-
axP1, axT1 = F.plot_AllProj(Elt='CP',EltApert='P',EltLOS='',EltTor='P')
-axP2, axT2 = F.plot_AllProj(Elt='P',EltApert='P',EltLOS='L',EltTor='P')
-plt.show()
-
-
-

Poloidal and toroidal projections of the geometry of F, with the viewing cones

-
-

Poloidal and toroidal projections of the geometry of F, with the LOS

-
-

We can also select one particular detector and plot it alone. To do this, we can use the dedicated routine which returns the index of a detector recognisable by one of its ID attributes (its name, its signal code, its savename or any ID attribute that you have previously passed).

-
ind = F.get_ind_Detect(IDAttr='Name',IDExp="=='F_021'").nonzero()[0]
-axP3, axT3 = F.LDetect[ind].plot_AllProj(Elt='CP',EltApert='',EltLOS='L',EltTor='P')
-plt.show()
-
-
-
Poloidal and toroidal projections of the geometry of one particular detector of F

-
-

It is also interesting to plot the LOS and viewing cones in projection space, to see how a realistic diagnostic looks in this representation and how far we are from a pure LOS (specifying that we want the LOS 'L', the viewing cones 'C' and the Tor envelope 'T'):

-
ax = F.plot_Impact_PolProj(Elt='CLT')
-plt.show()
-
-
-
Representation in projection space of both the LOS and the viewing cones of F, with the Tor envelope. One detector spans from theta values close to pi to values close to 0, which explains the boundaries of its associated cone stretching from one end of the graph to the other (in reality, it should be separated into two polygons on this graph).

-
-

We can see that the surfaces corresponding to the viewing cones are reasonably small (and quite elongated), which is an indication that the LOS approximation seems a reasonable hypothesis from a purely geometrical point of view; but of course, in practice it also depends on the nature / shape of the observed emissivity field.

-

When it comes to computing the signal of each detector associated to an arbitrary input emissivity field, one must keep in mind that while the LOS approximation allows for fast but approximate computation, a full-3D approach gives an accurate result but is much slower. While a fraction of a second is sufficient for a LOS computation, several minutes can be necessary for each detector for a full 3D computation. Of course, it depends on the volume which is inside the viewing cone and on the level of accuracy to be obtained. The method used is simple sampling in cartesian coordinates of the viewing cone. The default is a uniform grid of 5 mm x 5 mm x 5 mm, which appears sufficient for most standard cases, but the user can choose his own grid size by using the 'dX12' kwdarg (=[0.005,0.005] by default, in the plane perpendicular to the LOS) and 'ds' (=0.005 by default, along the LOS), as sketched below. Since the user may often need to evaluate the signal not only once but several times for each detector (for example to plot the time evolution of the signal), it is possible to store a pre-computed grid (the solid angle, which is the longest value to calculate, is pre-computed) and use it for all the successive computations (the pre-computed solid angle is then simply multiplied by the local value of the input emissivity and integration is performed by summation and multiplication by the elementary volume).

-
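As a purely hypothetical example (the dX12 and ds kwdarg names come from the paragraph above, but their exact placement in the signal-computation method is an assumption), a finer 2 mm sampling grid could be requested like this:

# Hypothetical call: request a finer 2 mm sampling grid for the 3D integration
# (assumes calc_Sig accepts the dX12 and ds kwdargs described above).
SigFine = F.calc_Sig(Emiss1, Method='Vol', Mode='simps', dX12=[0.002,0.002], ds=0.002)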

An example is given below, where three input emissivity fields are provided. The first one is toroidally constant, the local maximum of the second one rotates as if it were a hot spot on the q=1 surface, and the last one is toroidally constant but radiates anisotropically (it radiates 100 times more in the toroidal direction).

-
# Define toroidally constant emissivity
-def Emiss1(Points):
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Z = Points[2,:]
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-
-# Define toroidally variable emissivity
-def Emiss2(Points):
-    ROff = 0.05
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Theta = np.arctan2(Points[1,:],Points[0,:])
-    Z = Points[2,:]
-    CentR = 1.68+ROff*np.cos(Theta)
-    CentZ = 0.05+ROff*np.sin(Theta)
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-CentR)**2/0.08**2 - (Z-CentZ)**2/0.15**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-
-# Define anisotropic emissivity
-def Emiss3(Points, Vect):
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Theta = np.arctan2(Points[1,:],Points[0,:])
-    Z = Points[2,:]
-    Cos = -np.sin(Theta)*Vect[0,:] + np.cos(Theta)*Vect[1,:]
-    Sin2 = Vect[2,:]**2 + (np.sin(Theta)*Vect[1,:] + np.cos(Theta)*Vect[0,:])**2
-    sca = 100.*Cos**2+1.*Sin2
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2)
-    Val = Val*sca
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-
-
-

Since we know we are going to use the same grid several times, we pre-compute it (using the default parametrisation); the pre-computed matrix is then automatically assigned as a new attribute of each Detect object (this may take 2-5 min for each detector; ToDo: implement a full C version of the bottleneck routines for faster computation):

-
# Pre-compute the grid
-F.set_SigPreCompMat()
-
-
-

And then we compute the LOS and 3D signals, specifying that we want to use the pre-computed grid for faster computation (now the computation should take less than a second for each detector):

-
Sig1, Sig2 = F.calc_Sig(Emiss1, Method='LOS',Mode='quad'), F.calc_Sig(Emiss1, Method='Vol',Mode='simps', PreComp=True)
-ax = F.plot_Sig(Sig1)
-ax.plot(np.arange(1,F.nDetect+1), Sig2 ,label='Vol', c='r')
-plt.show()
-
-
-

The integrated signals of camera F with a toroidally constant input emissivity (both with a LOS and 3D approach)

-
-

It can be seen that even for a toroidally constant emissivity, there are some small differences between the pure LOS integration and the full 3D computation (of the order of 1-2 % for the most central LOS, and up to 10 % near the edge). In order to check that these differences are real and are not due to discretisation errors or a bad implementation of the 3D integrating algorithm, we can do the following: we provide as an input an emissivity field that only varies with Z. Indeed, the F camera is mostly looking upward; hence, if the emissivity field only changes with Z, the validity of the LOS approximation should be very good and the difference between the LOS and 3D integrations should be minimal, since the emissivity is indeed quasi-constant on planes perpendicular to the LOS.

-
def EmissZ(Points):
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Z = Points[2,:]
-    Val = np.exp(-(Z-0.05)**2/0.35**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-
-
-
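The corresponding signals, shown in the figure below, can then be computed and plotted following exactly the same pattern as for Emiss1 above (a sketch):

# Sketch: same calls as for Emiss1, applied to the Z-only emissivity field
SigZ1 = F.calc_Sig(EmissZ, Method='LOS', Mode='quad')
SigZ2 = F.calc_Sig(EmissZ, Method='Vol', Mode='simps', PreComp=True)
ax = F.plot_Sig(SigZ1)
ax.plot(np.arange(1,F.nDetect+1), SigZ2, label='Vol', c='r')
plt.show()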

The integrated signals of camera F with a horizontally constant input emissivity (both with a LOS and 3D approach)

-
-

We can see here indeed that the agreement is particularly good for the most central LOS (which are the most vertical, hence the LOS approximation holds best for them), and less so for the edge LOS, which are more and more inclined with respect to the iso-emissivity surfaces.

-

We can now try to do the same for the second input emissivity (with “m=1-like” perturbation):

-
Sig1, Sig2 = F.calc_Sig(Emiss2, Method='LOS',Mode='quad'), F.calc_Sig(Emiss2, Method='Vol',Mode='simps', PreComp=True)
-ax = F.plot_Sig(Sig1)
-ax.plot(range(1,F.nDetect+1), Sig2 ,label='Vol', c='r')
-ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig2.png",frameon=None,bbox_inches=0)         # Saving for Tutorial illustration
-plt.show()
-
-
-

The integrated signals of camera F with a toroidally varying input emissivity (both with a LOS and 3D approach)

-
-

We observe that the change with respect to the toroidally constant emissivity is not dramatic, which can be explained by the averaging effect of the cone of sight.

-

And finally for the anisotropic emissivity, note that in this case we have to specify to the method that the emissivity is anisotropic.

-
Sig1, Sig2 = F.calc_Sig(Emiss3, Ani=True, Method='LOS',Mode='quad'), F.calc_Sig(Emiss3, Ani=True, Method='Vol',Mode='sum', PreComp=True)
-ax = F.plot_Sig(Sig1)
-ax.plot(range(1,F.nDetect+1), Sig2 ,label='Vol', c='r')
-ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig3.png",frameon=None,bbox_inches=0)         # Saving for Tutorial illustration
-plt.show()
-
-
-

The integrated signals of camera F with an anisotropic input emissivity (both with a LOS and 3D approach)

-
-

Unsurprisingly, this case displays the most dramatic differences between the LOS approach, intrinsically limited, and the 3D computation. The observed differences range from a few percent (very small anisotropy) to several orders of magnitude (purely forward-dominated radiation). The next challenge is to determine whether we will be able to reconstruct such anisotropies in an inverse-problem approach, which will be addressed in ToFu_MatComp and ToFu_Inv.

-
-
- - -
-
-
-
-
-

\ No newline at end of file
diff --git a/_Old_doc/build/html/ToFu_Inv.html b/_Old_doc/build/html/ToFu_Inv.html deleted file mode 100644 index 32d1600a2..000000000 --- a/_Old_doc/build/html/ToFu_Inv.html +++ /dev/null @@ -1,161 +0,0 @@
ToFu_Inv — ToFu alpha documentation
-
-
-
- -
-

ToFu_Inv

-

(This project is not finalised yet, work in progress...)

-

ToFu_Mesh is a ToFu module aimed at handling the spatial discretisation of a 3D scalar field in a vacuum chamber (typically the isotropic emissivity of a plasma). Such discretisation is done using B-splines of any order relying on a user-defined rectangular mesh (possibly with variable grid size). It is particularly useful for tomographic inversions and fast synthetic diagnostics.

-

It is designed to be used jointly with the other ToFu modules, in particular with ToFu_Geom and ToFu_MatComp. It is a ToFu-specific discretisation library which remains quite simple and straightforward. However, its capacities are limited to rectangular meshes and it may ultimately be perceived as a much less powerful version of PIGASUS/CAID. Users who wish to use ToFu only for tomographic inversions may find ToFu_Mesh sufficient for their needs; others, who wish to use a synthetic diagnostic approach and/or to use ToFu_Mesh jointly with plasma physics codes (MHD...), may prefer using PIGASUS/CAID for spatial discretisation.

-

Hence, ToFu_Mesh mainly provides two object classes: one representing the mesh, and another (which relies on the former) representing the basis functions used for discretisation:

The object classes in ToFu_Mesh:

ID : An identity object that is used by all ToFu objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...). Inputs needed: by default only a name (a character string) is necessary; a default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attributes can be added to suit the specific needs of the data acquisition system of each fusion experiment or the naming conventions of each laboratory.
Mesh1D, Mesh2D, Mesh3D : 1D, 2D and 3D mesh objects, storing the knots and centers, as well as the correspondence between knots and centers in both ways. The higher-dimension mesh objects are defined using lower-dimension mesh objects. The Mesh2D object includes an envelope polygon. They all include plotting methods and methods to select a subset of the total mesh. The Mesh3D object is not finished. Inputs needed: a numpy array of knots, which can be defined using some of the functions detailed below (for easy creation of linearly spaced knots with chosen resolution).
BaseFunc1D, BaseFunc2D, BaseFunc3D : 1D, 2D and 3D families of B-splines, relying on Mesh1D, Mesh2D, Mesh3D objects, with chosen degree and multiplicity for each dimension. Includes methods for plotting, for determining the support and the knots and centers associated to each basis function, as well as for computing 1st, 2nd or 3rd order derivatives (as functions), and local values (summation of all basis functions or their derivatives at a given point and for given weights). Includes methods for computing integrals of derivative operators... Inputs needed: a Mesh object of the adapted dimension, and a degree value.
-

The following will give a more detailed description of each object and its attributes and methods through a tutorial at the end of which you should be able to create your own mesh and basis functions and access its main characteristics.

-
-

Getting started with ToFu_Mesh

-

Once you have downloaded the whole ToFu package (and made sure you also have scipy, numpy and matplotlib, as well as a free polygon-handling library called Polygon which can be downloaded at ), just start a python interpreter and import ToFu_Mesh (we will always import ToFu modules 'as' a short name to keep track of the functionalities of each module). To handle the local paths of your computer, we will also import the small module called ToFu_PathFile, and matplotlib and numpy will also be useful:

-
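A typical import block looks as follows (a sketch mirroring the imports used in the ToFu_MatComp tutorial below; importing ToFu_Mesh as TFM is just an assumed naming convention):

import numpy as np
import matplotlib.pyplot as plt
import ToFu_PathFile as TFPF
import ToFu_Mesh as TFM     # assumed short name for the module
import os
import cPickle as pck       # for saving and loading objects
RP = TFPF.Find_Rootpath()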

The os module is used for exploring directories and the cPickle module for saving and loading objects.

-
-
-

The Tor object class

-

To define the volume of the vacuum chamber, you need to know the (R,Z) coordinates of its reference polygon (in a poloidal cross-section). You should provide it as a (2,N) numpy array, where N is the number of points defining the polygon. To give the Tor object its own identity you should at least choose a name (i.e.: a character string). For more elaborate identification, you can define an ID object and give it as an input instead of a simple name. You can also provide the position of a "center" of the poloidal cross-section (in 2D (R,Z) coordinates as a (2,1) numpy array) that will be used to compute the coordinates in transformation space of any LOS using this Tor object (and the sinogram of any scalar emissivity field using this Tor object). If not provided, the center of mass of the reference polygon is used as a default "center".

-

In the following, we will use the geometry of ASDEX Upgrade as an example. We first have to give a reference polygon ('PolyRef' below) as a (2,N) numpy array in (R,Z) coordinates.

-

Alternatively, you can store PolyRef in a file and save this file locally, or use one of the default tokamak geometries stored in the ToFu database, where Tor input polygons are stored in 2-line .txt files (space-separated values of the R coordinates on the first line, and corresponding Z coordinates on the second line). Here, we use the default ASDEX Upgrade reference polygon stored in AUG_Tor.txt.

-

We have now created two Tor objects, and ToFu_Geom has computed a series of geometrical characteristics that will be useful later (or that simply provide general information). TO BE FINISHED!

-
-\[\nabla^2 u = \sin(x)\]
-
-
- - - - \ No newline at end of file diff --git a/_Old_doc/build/html/ToFu_MatComp.html b/_Old_doc/build/html/ToFu_MatComp.html deleted file mode 100644 index f2e4c5517..000000000 --- a/_Old_doc/build/html/ToFu_MatComp.html +++ /dev/null @@ -1,318 +0,0 @@ - - - - - - - - - - ToFu_MatComp — ToFu alpha documentation - - - - - - - - - - - - - - - - -
-
-
-
- -
-

ToFu_MatComp

-

(This project is not finalised yet, work in progress...)

-

ToFu_MatComp is a ToFu module aimed at computing the geometry matrix associated to a diagnostic geometry from ToFu_Geom and a set of basis functions from ToFu_Mesh. From the first, it requires either a GDetect object or a GLOS object (keep in mind that a GDetect object automatically includes its associated GLOS object), or more simply a list of Detect objects or LOS objects (in case you don’t want to define their group equivalent for any particular reason). From ToFu_Mesh, it requires a BaseFunc2D or BaseFunc3D object.

-

The output (i.e.: the computed geometry matrix) can be retrieved directly as a numpy array, or as a ToFu_MatComp object, which includes the array as an attribute and also provides useful methods to quickly explore its main characteristics, as illustrated in the following.

-

Hence, ToFu_MatComp provides the following object classes :

The object classes in ToFu_MatComp:
GMat2D: A geometry matrix computed from a BaseFunc2D object (i.e.: a 2D set of basis functions, assuming the toroidal angle is an ignorable coordinate). Inputs needed: a GDetect object or a list of Detect objects, and a BaseFunc2D object.
GMat3D: A geometry matrix computed from a BaseFunc3D object (i.e.: a 3D set of basis functions, not implemented yet...). Inputs needed: to do...
-

The following will give a more detailed description of each object and its attributes and methods through a tutorial at the end of which you should be able to compute your own geometry matrix and access its main characteristics.

-
-

Getting started with ToFu_MatComp

-

To use ToFu_MatComp, you first need to import it as well as ToFu_PathFile. Of course, matplotlib and numpy will also be useful.

-
import numpy as np
-import matplotlib.pyplot as plt
-import ToFu_PathFile as TFPF
-import ToFu_Defaults as TFD
-import ToFu_Geom as TFG
-import ToFu_MatComp as TFMC
-import os
-import cPickle as pck # for saving objects
-RP = TFPF.Find_Rootpath()
-
-
-

The os module is used for exploring directories and the cPickle module for saving and loading objects. We first need to load a BaseFunc2D object (created using ToFu_Mesh and saved), as well as a GDetect object (created with ToFu_Geom and saved):

-
GD = TFPF.open_object(RP+'/Objects/TFG_GDetect_AUG_SXR_Test_F_2_D20141128_T195755.pck')
-BF0 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D0_D20141202_T230455.pck')
-BF1 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D1_D20141202_T230455.pck')
-BF2 = TFPF.open_object(RP+'/Objects/TFM_BaseFunc2D_AUG_SXR_Rough1_D2_D20141202_T230455.pck')
-
-
-

In the following, we will illustrate the capacities of ToFu_MatComp with the F camera of the SXR diagnostic of ASDEX Upgrade and a relatively coarse 2D mesh, with a resolution around 2 cm in the central region and around 6 cm near the edge, on which degree 0 bivariate B-splines have been imposed (we will illustrate later the use of 1st and 2nd order bivariate B-splines).

-

Now we simply need to build the associated geometry matrix. Computation is done in two steps: first, an index matrix (a numpy array of booleans) is computed that indicates, for each detector, which mesh elements it can see (by checking whether they lie in its projected viewing cone); this first step typically takes 1-5 min per detector and makes the second step much faster. The second step consists of properly computing the integrated contribution of each basis function for each detector. This is obviously longer and typically takes 2-6 min per detector (instead of at least 10 times more without the first step). Finally, both for comparison purposes and for those users who want to use a pure LOS approach, another geometry matrix is computed with a pure LOS approximation, which is obviously much faster and typically takes 0.01-1 s per detector (as always, it depends on the mesh resolution and basis function degree).

-
GM0 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D0', BF0, GD, Mode='simps')
-GM1 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D1', BF1, GD, Mode='simps')
-GM2 = TFMC.GMat2D('AUG_SXR_F2_Rough1_D2', BF2, GD, Mode='simps')
-
-
-

Now that we have a proper GMat2D object, let us use its built-in methods to explore its properties.

-

First of all, we can plot the total contribution (from all the basis functions) to each detector simply by plotting the sum of the geometry matrix, and comparing it to the sum of the LOS-approximated geometry matrix. You can do this manually or use the dedicated built-in method, which also shows the sum in the other dimension (i.e.: the total contribution of each basis function to all detectors):

-
ax1, ax2 = GM0.plot_sum(TLOS=True)
-plt.show()
-
-
-
Figure: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 0th order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations.

-
-

We can see that there seems to be little difference between the full 3D and the LOS-approximated matrices, but let us go a little further into the details by visualising the values of the geometry matrix for a particular chosen detector, and comparing it to its LOS-approximated equivalent:

-
axP, axM, axBF = GM0.plot_OneDetect_PolProj(8, TLOS=True)
-axM.set_xlim(400,500)
-plt.show()
-
-
-
Figure: Total contribution of each 0th order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations.

-
-

Similarly, we can go the other way around and visualise the values of the geometry matrix for any chosen basis function (and thus see how it contributes to various detectors):

-
axP, axD, axDred = GM0.plot_OneBF_PolProj(450, TLOS=True)
-plt.show()
-
-
-
Figure: Total contribution of a particular 0th order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations; the chosen pixel is ignored by the LOS approximation, while in reality it is seen by two detectors.

-
-

We can see significant differences when we consider the details of a specific line (or column) of the geometry matrix, which is important because it provides the set of equations that link the basis functions to the measurements. If you want to perform an inversion, you should pay particular attention to this set of equations as the tomography problem hinges on Fredholm integral equations of the first kind, making it an ill-posed problem particularly sensitive to errors both in the measurements and in the equations.

-

In summary, despite similar sums (i.e. total contribution to each detector), we observe that with the LOS approximation the number of pixels that contribute to the signal is smaller, but that their contribution is generally over-estimated compared to the full 3D computation. If we consider each line of the geometry matrix, this line represents the equation associated to a particular detector measurement f_i:

-
-\[f_i = M_{i,1}b_1 + M_{i,2}b_2 + ... + M_{i,N}b_N\]
-

Our observation then means that both computations give the same sum of terms on the right-hand side, but that the LOS approximation tends to give higher values for fewer terms, thus affecting the spread of the weights over the different terms. This is an important limitation of the LOS approximation when it is used to compute a geometry matrix using pixels as basis functions.
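A purely illustrative numpy sketch of this effect (toy numbers, not actual ToFu output): two matrix rows with the same total weight, one spread over several basis functions (full 3D computation) and one concentrated on a single term (LOS approximation), predict different signals for the same coefficients:

import numpy as np

b       = np.array([0.1, 0.5, 1.0, 0.5, 0.1])   # toy coefficients of the N basis functions
M_i_3D  = np.array([0.0, 0.8, 1.4, 0.8, 0.0])   # toy i-th row, full 3D computation
M_i_LOS = np.array([0.0, 0.0, 3.0, 0.0, 0.0])   # toy i-th row, LOS approximation (same sum, fewer terms)

print np.sum(M_i_3D), np.sum(M_i_LOS)           # identical total contribution...
print np.dot(M_i_3D, b), np.dot(M_i_LOS, b)     # ...but different predicted measurements f_i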

-

Now let us consider the same matrix but computed with 1st and 2nd order bivariate B-splines:

-
ax1, ax2 = GM1.plot_sum(TLOS=True)
-axP, axM, axBF = GM1.plot_OneDetect_PolProj(8, TLOS=True)
-axM.set_xlim(400,500)
-axP, axD, axDred = GM1.plot_OneBF_PolProj(450, TLOS=True)
-plt.show()
-
-
-
Figure: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 1st order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations.

-
-
Figure: Total contribution of each 1st order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations.

-
-
Figure: Total contribution of a particular 1st order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations; the chosen pixel is ignored by the LOS approximation, while in reality it is seen by two detectors.

-
-
ax1, ax2 = GM2.plot_sum(TLOS=True)
-axP, axM, axBF = GM2.plot_OneDetect_PolProj(8, TLOS=True)
-axM.set_xlim(400,500)
-axP, axD, axDred = GM2.plot_OneBF_PolProj(450, TLOS=True)
-plt.show()
-
-
-
Figure: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 2nd order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations.

-
-
Figure: Total contribution of each 2nd order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations.

-
-
Figure: Total contribution of a particular 2nd order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations; the chosen pixel is ignored by the LOS approximation, while in reality it is seen by two detectors.

-
-

We see that the overlapping of higher-order basis functions ensures a more balanced distribution of the weights computed with a LOS approximation. This, and the fact that the basis functions are more regular, makes higher-order basis functions a valuable improvement for tomographic inversions using a geometry matrix computed with a LOS approximation. Obviously, a full 3D computation remains even more accurate.

-

Now that the geometry matrix is computed (with whatever method or basis functions), it can be used in two ways: either as the set of equations necessary for solving the tomographic inversions (see ToFu_Inv), or as a pre-computed intermediate for forward-modelling or a synthetic diagnostic (i.e.: reconstructing the measurements assuming an input emissivity field). This method only requires that the chosen basis functions are relevant for the input emissivity (i.e.: don’t use a GMat2D object if the emissivity is not toroidally constant, or if the emissivity is anisotropic). Once you are sure that you have a relevant set of basis functions with their associated geometry matrix, just fit the basis functions to the input emissivity (this will give you the coefficients of each basis function) and use the geometry matrix to get the associated measurements, as illustrated below:

-
Tor2 = GD.Tor
-def Emiss1(Points):
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Z = Points[2,:]
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return 1000.*Val
-
-Coefs0 = BF0.get_Coefs(ff=Emiss1)
-Coefs1 = BF1.get_Coefs(ff=Emiss1)
-Coefs2 = BF2.get_Coefs(ff=Emiss1)
-
-ax1, ax2, ax3, ax4 = GM0.plot_Sig(Coefs=Coefs0, TLOS=True)
-ax1, ax2, ax3, ax4 = GM1.plot_Sig(Coefs=Coefs1, TLOS=True)
-ax1, ax2, ax3, ax4 = GM2.plot_Sig(Coefs=Coefs2, TLOS=True)
-plt.show()
-
-
-
Figure: Synthetic diagnostic using decomposition of an input emissivity on a set of 0th order B-splines, geometry matrix computed with both 3D and LOS approaches.

-
-
Figure: Synthetic diagnostic using decomposition of an input emissivity on a set of 1st order B-splines, geometry matrix computed with both 3D and LOS approaches.

-
-
Figure: Synthetic diagnostic using decomposition of an input emissivity on a set of 2nd order B-splines, geometry matrix computed with both 3D and LOS approaches.

-
-

This method is faster than the direct, brute-force computation introduced in ToFu_Geom, but is limited by the relevance of the basis functions with respect to the input emissivity. We can see that the LOS approximation generally gives better results (in a synthetic diagnostic approach) when used with higher-order basis functions (as explained earlier). The difference is visible between 0th and 1st order basis functions (but not so much between 1st and 2nd order basis functions).

-

Furthermore, another general tendency appears: the LOS approximation tends to underestimate the signal for the lines of sight on the High Field Side (HFS) and to overestimate it for those on the Low Field Side (LFS), with respect to the region of maximum emissivity. This is consistent with the fact that the toroidicity induces a general shift towards the LFS. Hence, the geometrically optimal LOS (from the center of mass of the detector to the center of mass of the intersection of all its apertures) is optimal in cartesian coordinates but not in cylindrical coordinates. A different LOS (chosen taking the toroidicity into account, for example by computing the center of mass of the viewing cone in (R,theta) coordinates) would probably help solve this issue and would allow you to use a pure LOS approximation with better validity (to do in ToFu_Geom...).
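A purely illustrative numpy sketch of this idea (the sample points and weights below are arbitrary stand-ins for a discretised viewing cone, not ToFu output): weighting by R when computing the barycenter shifts it towards the LFS, as discussed above:

import numpy as np

# Arbitrary (R,Z) sample points standing in for the poloidal projection of a viewing cone
R = np.linspace(1.2, 2.1, 200)
Z = np.linspace(-0.3, 0.4, 200)
w = np.ones(R.size)                                                  # toy geometric weights

Bary_geom = np.array([np.sum(w*R), np.sum(w*Z)]) / np.sum(w)         # purely geometric barycenter
Bary_tor  = np.array([np.sum(w*R*R), np.sum(w*R*Z)]) / np.sum(w*R)   # R-weighted (toroidicity-aware) barycenter
print Bary_geom, Bary_tor                                            # the R-weighted point lies further towards the LFS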

-

Again, the above numerical results are just helpful for understanding what’s going on, but keep in mind that the degree of accuracy of the LOS approximation depends not only on the geometry, but also on the input emissivity that you are using (i.e.: large gradients / curvature, toroidal changes, anisotropy, localised particular features...).

-
-
-

Limits to the LOS approximation for the geometry matrix computation

-

As we saw, the fact that the final solution is probably compatible with the LOS approximation (i.e. it has ‘reasonable’ curvature across most viewing cones) does not mean that the LOS approximation is valid with the basis functions used to compute it. Indeed, the LOS approximation can basically be written as follows:

-
-\[\begin{split}f_i = \int_{LOS}\iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2Sdl = E_i\int_{LOS}\left<\epsilon^{\eta}\right>dl\\\end{split}\]
-

Where E_i is the etendue of detector i and:

-
-\[\begin{split}\begin{array}{l} - \left<\epsilon^{\eta}\right> = \frac{1}{E_i} \iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2S\\ - E_i = \iint_S\int_{4\pi}\delta_id^2\Omega d^2S -\end{array}\end{split}\]
-

The corresponding LOS-approximated signal would be:

-
-\[\hat{f_i} = E_i\int_{LOS}\epsilon^{\eta}dl\]
-

Hence, the LOS approximation is valid if we can safely assume that the local value of the emissivity on the LOS is a good approximation of its value averaged over the surface perpendicular to the LOS:

-
-\[\begin{split}\hat{f_i} \approx f_i \Leftrightarrow \int_{LOS}\left<\epsilon^{\eta}\right>dl \approx \int_{LOS}\epsilon^{\eta}dl\end{split}\]
-

Which can be fulfilled if (but not exclusively if):

-
-\[\begin{split}\forall \ l \ \in \ LOS \ , \ \epsilon^{\eta}\left(l\right) \approx \frac{1}{E_i} \iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2S\left(l\right) = \left<\epsilon^{\eta}\right>\left(l\right)\end{split}\]
-

When applied to a physical (i.e. real) emissivity field, this assumption may hold because the emissivity fields usually considered vary sufficiently slowly in the direction perpendicular to the LOS (within the limits of the viewing cone). However, when computing the geometry matrix, this equation is not applied to a ‘physical’ emissivity field, but to individual basis functions. In particular, if the nature of these basis functions allows for steep variations across the LOS within the limits of the viewing cone, then the LOS approximation may need to be questioned.

-

A typical case is pixels (i.e.: 0th order bivariate B-splines), particularly when their size is too small compared to the local beam width (ref : my thesis + ingesson). In such cases, the above integral, taken for a single pixel (which is what is evaluated in the geometry matrix), can in no way be approximated by the value in the pixel (i.e.: on the LOS).
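A toy numerical illustration of this point (arbitrary numbers, independent of ToFu): for a pixel much narrower than the beam, the beam-averaged contribution differs strongly from the value taken on the LOS itself:

import numpy as np

x = np.linspace(-3., 3., 2001)                 # coordinate across the LOS (arbitrary units)
beam = np.exp(-x**2)                           # viewing-cone weight across the LOS, width ~1
beam = beam / np.trapz(beam, x)                # normalised so it defines an average

pixel = (np.abs(x) < 0.1).astype(float)        # a 'pixel' basis function much narrower than the beam

val_on_LOS   = pixel[np.argmin(np.abs(x))]     # value of the pixel on the LOS itself (= 1)
val_beam_avg = np.trapz(beam*pixel, x)         # beam-averaged value actually seen by the detector
print val_on_LOS, val_beam_avg                 # 1.0 vs roughly 0.11: the on-LOS value badly over-estimates the contribution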

-

This is a very common mistake: even if the LOS approximation is valid for the final solution, it does not mean it is valid for the basis functions that you are using! And, ironically, using it to compute the geometry matrix anyway will lead to a final solution that is less regular than it should be, because the LOS approximation tends to overestimate the contribution of some pixels and underestimate the contribution of others. Paradoxically, not using the LOS approximation for the geometry matrix is both more physical and leads to solutions which are more likely to be compatible with this LOS approximation! Again, this depends on the basis functions you are using (nature and size).

-

If you still want to use pixels and the (pure) LOS approximation together, a rule of thumb to limit the bias is to use pixels of a size comparable to the beam width in the region where the signal is maximal (but there will still be situations in which the approximation will not hold, see: my thesis). Another quite common solution is to write your own ‘homemade’ routine to compensate for the beam width (for example with anti-aliasing, with adaptive LOS, or with hybrid 1D-2D-3D solutions). Most people have their own tricks to compensate in one way or another.

-

Other solutions are either to use the LOS approximation but with different basis functions such as 1st or 2nd order bivariate B-splines (both because their support overlap and because they are more regular), or ‘simply’ not to use the LOS approximation (but an accurate full 3D computation requires a lot of painful work). Since both these solutions are fully implemented in ToFu, you can start rejoicing and using it :-)

-
-
- - - - \ No newline at end of file diff --git a/_Old_doc/build/html/ToFu_Mesh.html b/_Old_doc/build/html/ToFu_Mesh.html deleted file mode 100644 index 3397879c4..000000000 --- a/_Old_doc/build/html/ToFu_Mesh.html +++ /dev/null @@ -1,586 +0,0 @@ - - - - - - - - - - ToFu_Mesh — ToFu alpha documentation - - - - - - - - - - - - - - - - -
-
-
-
- -
-

ToFu_Mesh

-

(This project is not finalised yet, work in progress...)

-

ToFu_Mesh is a ToFu module aimed at handling the spatial discretisation of a 3D scalar field in a vacuum chamber (typically the isotropic emissivity of a plasma). Such discretisation is done using B-splines of any order relying on a user-defined rectangular mesh (possibly with variable grid size). It is particularly useful for tomographic inversions and fast synthetic diagnostics.

-

It is designed to be used jointly with the other ToFu modules, in particular with ToFu_Geom and ToFu_MatComp. It is a ToFu-specific discretisation library which remains quite simple and straightforward. However, its capacities are limited to rectangular meshes and it may ultimately be perceived as a much less powerful version of PIGASUS/CAID. Users who wish to use ToFu only for tomographic inversions may find ToFu_Mesh sufficient for their needs; others, who wish to use a synthetic diagnostic approach and/or to use ToFu_Mesh jointly with plasma physics codes (MHD...), may prefer using PIGASUS/CAID for spatial discretisation.

-

Hence, ToFu_Mesh mainly provides two object classes: one representing the mesh, and the other one (which relies on the former) representing the basis functions used for discretisation:

The object classes in ToFu_Mesh:
ID: An identity object used by all ToFu objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...). Inputs needed: by default only a name (a character string); a default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attributes can be added to suit the specific needs of the data acquisition system of each fusion experiment or the naming conventions of each laboratory.
Mesh1D, Mesh2D, Mesh3D: 1D, 2D and 3D mesh objects, storing the knots and centers as well as the correspondence between knots and centers in both directions. The higher-dimension mesh objects are defined using lower-dimension mesh objects, and the Mesh2D object includes an envelope polygon. They all include plotting methods and methods to select a subset of the total mesh. The Mesh3D object is not finished. Inputs needed: a numpy array of knots, which can be defined using some of the functions detailed below (for easy creation of linearly spaced knots with chosen resolution).
BaseFunc1D, BaseFunc2D, BaseFunc3D: 1D, 2D and 3D families of B-splines, relying on Mesh1D, Mesh2D and Mesh3D objects, with chosen degree and multiplicity for each dimension. They include methods for plotting, for determining the support, knots and centers associated to each basis function, for computing 1st, 2nd or 3rd order derivatives (as functions) and local values (summation of all basis functions or their derivatives at a given point for given weights), and for computing integrals of derivative operators... Inputs needed: a Mesh object of the adapted dimension, and a degree value.
-

The following gives a more detailed description of each object and its attributes and methods through a tutorial, at the end of which you should be able to create your own mesh and basis functions and access their main characteristics.

-
-

Getting started with ToFu_Mesh

-

Once you have downloaded the whole ToFu package (and made sure you also have scipy, numpy and matplotlib, as well as the free polygon-handling library called Polygon, which can be downloaded separately), just start a python interpreter and import ToFu_Geom and ToFu_Mesh (we will always import ToFu modules ‘as’ a short name to keep track of the functionalities of each module). To handle the local paths on your computer, we will also import the small module called ToFu_PathFile; matplotlib and numpy will also be useful:

-
import numpy as np
-import matplotlib.pyplot as plt
-import ToFu_Geom as TFG
-import ToFu_Mesh as TFM
-import ToFu_PathFile as TFPF
-import os
-import cPickle as pck # for saving objects
-import ToFu_Defaults as TFD
-RP = TFPF.Find_Rootpath()      # root path of the local ToFu installation, used below to locate input files
-
-
-

The os module is used for exploring directories and the cPickle module for saving and loading objects.

-
-
-

The Mesh1D, Mesh2D and Mesh3D object classes

-

In this section, we describe the Mesh objects starting from the unidimensional to the 3D version.

The attributes of a Mesh1D object:
self.ID: The ID class of the object
self.NCents, self.NKnots: The number of mesh elements or centers (resp. knots) of the object (typically self.NKnots = self.NCents+1)
self.Cents, self.Knots: The coordinates of the centers and knots themselves, as two numpy arrays
self.Lengths, self.Length, self.BaryL, self.BaryP: The length of each mesh element, the total length of the mesh, the center of mass of the mesh (i.e.: weighted by the respective length of each mesh element), and the barycenter of self.Cents
self.Cents_Knotsind, self.Knots_Centsind: The index arrays used to get the correspondence between each mesh element (resp. each knot) and its associated knots (resp. its associated mesh elements)
The attributes of a Mesh2D object:
self.ID: The ID class of the object
self.MeshR, self.MeshZ: The two Mesh1D objects used to create this Mesh2D object
self.NCents, self.NKnots: The number of mesh elements or centers (resp. knots) of the object
self.Cents, self.Knots: The coordinates of the centers and knots themselves, as two numpy arrays
self.Surfs, self.Surf, self.VolAngs, self.VolAng, self.BaryV, self.BaryS, self.BaryL, self.BaryP: The surface of each mesh element, the total surface of the mesh, the volume per unit angle of each mesh element, the total volume per unit angle, the volume barycenter of the mesh (i.e. taking into account not only the surface repartition but also the toroidal geometry), the center of mass of the mesh (i.e.: weighted by the respective surface of each mesh element), the middle point (the average between the extreme (R,Z) coordinates), and the barycenter of all the self.Cents
self.Cents_Knotsind, self.Knots_Centsind: The index arrays used to get the correspondence between each mesh element (resp. each knot) and its associated knots (resp. its associated mesh elements)
self.BoundPoly: The boundary polygon of the mesh, useful for quickly estimating whether a point lies inside the mesh support or not
-

From an experiment-oriented perspective, ToFu_Mesh comes with simple functions to help you quickly define an optimal 1D grid, with explicit parametrisation of the spatial resolution in regions of interest. For example, if you want to define a 1D grid with a 5 cm resolution near the first end, that gradually refines to 1 cm at a given point, stays at 1 cm for a given length and is then gradually enlarged to 6 cm at the other end, you just have to feed the points of interest and their associated resolutions to the LinMesh_List function, as two lists of corresponding (start,end) tuples.

-
Knots, Res = TFM.LinMesh_List([(1.,1.5),(1.5,1.8),(1.8,2.)], [(0.06,0.02),(0.02,0.02),(0.02,0.08)])
-print Res
-print Knots
-
-
-
# [(0.0569230769230769, 0.02), (0.02, 0.02), (0.02, 0.07999999999999999)]
-# [ 1.          1.05692308  1.11076923  1.16153846  1.20923077  1.25384615  1.29538462  1.33384615  1.36923077  1.40153846  1.43076923  1.45692308     1.48        1.5         1.52        1.54        1.56        1.58        1.6  1.62        1.64        1.66        1.68        1.7         1.72        1.74     1.76        1.78        1.8         1.82        1.86        1.92        2.        ]
-
-
-

You can then feed the resulting knots numpy array to the Mesh1D object class and use this object’s methods to access all the features of interest of the created mesh:

-
M1 = TFM.Mesh1D('M1', Knots)
-ax1 = M1.plot(Elt='KCN')
-ax2 = M1.plot_Res()
-#plt.show()
-
-
-
Figure: Arbitrary 1D mesh with customized resolution in chosen regions.

-
-
Figure: Local spatial resolution of the created 1D mesh.

-
-

It can be seen that the algorithm tried to render a mesh with the required resolution, even though it had to decrease it slightly around the first point, where it is lower than the required 6 cm (this is necessary because the number of mesh elements must be an integer, which leads to rounding; a tiny sketch below reproduces this effect). This is also shown by the Res variable, which returns the actual resolution. Like for the ToFu_Geom plotting routines, the ‘Elt’ keyword argument provides you with the possibility of choosing what is going to be plotted (the knots ‘K’, the centers ‘C’ and/or the numbers ‘N’).
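Coming back to the rounding mentioned above, it can be reproduced in a few lines (uniform-resolution case only; this is not the actual graded algorithm used by LinMesh_List):

import numpy as np

L = 0.5                          # length of a mesh segment (m)
res_req = 0.06                   # requested resolution (m)
n = int(np.ceil(L/res_req))      # the number of mesh elements must be an integer
res_eff = L/n                    # effective resolution actually delivered
print n, res_eff                 # 9 elements, ~0.0556 m instead of the requested 0.06 m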

-

The Mesh2D object class relies on the same basics, except that its multi-dimensional nature means that it has extra methods for easy handling of mesh elements. Let us, for example, create a coarse 2D mesh using two different 1D mesh objects:

-
PolyRef = np.loadtxt(RP + '/Inputs/AUG_Tor.txt', dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2)
-AUG = TFG.Tor('AUG',PolyRef)
-KnotsR, ResR = TFM.LinMesh_List([(AUG.PRMin[0,0],1.5),(1.5,1.75),(1.75,AUG.PRMax[0,0])], [(0.06,0.02),(0.02,0.02),(0.02,0.06)])
-KnotsZ, ResZ = TFM.LinMesh_List([(AUG.PZMin[1,0],-0.1),(-0.1,0.1),(0.1,AUG.PZMax[1,0])], [(0.10,0.02),(0.02,0.02),(0.02,0.08)])
-M2 = TFM.Mesh2D('M2', [KnotsR,KnotsZ])
-#ax = M2.plot(Elt='MBKC')
-#plt.show()
-
-
-
Figure: Arbitrary 2D mesh with customized resolution in chosen regions.

-
-

The Mesh2D class comes with a method to automatically create another Mesh2D object that can be seen as a sub-mesh (only the elements lying inside an input polygon are kept, the rest being memorized only as ‘Background’). In our example, we can use a specific method of the TFG.Tor object class to create a smooth convex polygon lying inside the Tor envelope (see the kwdargs for customization of the smoothing and offset) to concentrate on the region where most SXR radiation comes from:

-
Poly = AUG.get_InsideConvexPoly(Spline=True)
-M2bis = M2.get_SubMeshPolygon(Poly, NLim=2)
-#ax = AUG.plot_PolProj(Elt='P')
-#ax = M2bis.plot(Elt='BM', ax=ax)
-#ax1, ax2, ax3, axcb = M2bis.plot_Res()
-#plt.show()
-
-
-
Figure: Submesh of the 2D mesh with customized resolution in chosen regions, with selected elements only (using an input polygon).

-
-
Figure: Local spatial resolution of the created 2D mesh (both linear and surface).

-
-

Here, the ‘NLim’ kwdarg is used to specify how many corners of a mesh element must lie inside the input polygon for this mesh element to be counted in.

-

Now, the Mesh2D object class provides tools to easily select and plot chosen elements of the 2D mesh. For example, if you want to get the coordinates of the four knots associated to mesh element number 50, you can use the attribute ‘Cents_Knotsind’ to get them, and then plot them:

-
Knots50 = M2bis.Knots[:,M2bis.Cents_Knotsind[:,50].flatten()]
-print Knots50
-ax = M2bis.plot_Cents(Ind=50, Elt='BMKC')
-#plt.show()
-
-
-
# [[ 1.69230769  1.71153846  1.71153846  1.69230769]
-#  [-0.94421053 -0.94421053 -0.85868421 -0.85868421]]
-
-
-
Figure: Selected mesh element and its associated knots.

-
-

Similarly, you can get and plot all the mesh element centers associated to knots number 160, 655 and 1000:

-
ind = np.array([160,655,1000])
-Cents = M2bis.Cents[:,M2bis.Knots_Centsind[:,ind].flatten()]
-print Cents
-ax = M2bis.plot_Knots(Ind=ind, Elt='BMKC')
-plt.show()
-
-
-
# [[ 1.83922727  1.07454545  1.70192308  1.87418182  1.13548182  1.72115385   1.83922727  1.07454545  1.70192308  1.87418182  1.13548182  1.72115385]
-#  [-0.66452632 -0.05        0.13140693 -0.66452632 -0.05        0.13140693  -0.59428947 -0.03        0.15562771 -0.59428947 -0.03        0.15562771]]
-
-
-
Figure: Selected mesh knots and their associated mesh elements.

-
-

The Mesh3D object class is currently being built... to be finished.

-

Now that we have access to a mesh, we can build basis functions on it. The basis functions available in ToFu_Mesh are all B-splines, as illustrated below.

-
-
-

The BaseFunc1D, BaseFunc2D and BaseFunc3D object classes

-

The use of B-splines allows for more flexibility and more accuracy than the standard pixels (which are B-splines of degree 0). Indeed, most of the tomographic algorithms using series expansion in physical space assess the regularity of the solution by computing the integral of a norm of one of its derivatives. While the use of pixels forces you to use discrete approximations of the derivative operators, the use of B-splines of sufficient degree allows an exact formulation of the derivative operators.
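To see the difference in regularity, here is a small scipy sketch (independent of ToFu) comparing a degree-0 basis element (a pixel) with a degree-2 basis element, the latter having a well-defined exact first derivative:

import numpy as np
from scipy.interpolate import BSpline

b0 = BSpline.basis_element([0.0, 0.5])               # degree-0 basis element on one mesh step (a 'pixel')
b2 = BSpline.basis_element([0.0, 0.25, 0.5, 0.75])   # degree-2 basis element spanning three mesh steps
x = 0.3
print b0(x), b2(x)                                   # local values of both elements
print b2.derivative(1)(x)                            # exact first derivative of the degree-2 element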

-

The attributes of a BaseFunc1D object are the following:

The attributes of a BaseFunc1D object:
self.ID: The ID class of the object
self.Mesh: The Mesh1D object on which the basis functions are built
self.LFunc, self.NFunc, self.Deg, self.Bound: The list of basis functions, the number of basis functions (self.NFunc=len(self.LFunc)), the degree of the basis functions, and the boundary condition (only 0 implemented so far, all points have multiplicity 1)
self.Func_Centsind, self.Func_Knotsind, self.Func_PMax: Arrays giving the correspondence index between each basis function and all its associated mesh centers (there are methods to go the other way around), its associated mesh knots, and the position of the maximum of each basis function (either a mesh center or a knot, depending on its degree)
-

Other quantities, indices or functions of interest are not stored as attributes but are instead accessible through methods, as will be illustrated in the following.

-

One of the most common issues in SXR tomography on tokamaks is the boundary constraint that one must enforce at the plasma edge to force the SXR emissivity field to decrease smoothly to zero, in order to avoid artefacts in the tomographic reconstructions. With pixels, this usually has to be done by adding artificial detectors that ‘see’ the edge pixels only and are associated to a ‘measured’ value of zero (and the regularisation process does the rest). With B-splines of degree 2, for example, this constraint can be built into the basis functions and enforced without having to add any artificial constraint, provided the underlying mesh is created accordingly, as illustrated in the following example, where a BaseFunc1D object of degree 2 is created and a method is used to fit its coefficients to an input gaussian-like function:

-
BF1 = TFM.BaseFunc1D('BF1',M1,2)
-FF = lambda xx: np.exp(-(xx-1.5)**2/0.2**2) + 0.4*np.exp(-(xx-1.65)**2/0.01**2)
-Coefs, res = BF1.get_Coefs(ff=FF)
-ax = BF1.plot(Coefs=Coefs, Elt='TL')
-ax.plot(np.linspace(1.,2.,500), FF(np.linspace(1.,2.,500)), c='r', lw=2, label='Ref function')
-#ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF1.png",frameon=None,bbox_inches=0)  # Saving for Tutorial illustration
-
-
-
Figure: 1D B-splines of a BaseFunc1D object, with arbitrary coefficients to create a gaussian-like profile.

-
-

By construction, and because we have only used points with multiplicity equal to one so far, the profile can only decrease smoothly to zero near the edge.

-

The BaseFunc1D object also comes with methods to compute and plot local values of its derivatives, or of some operators of interest that rely on derivatives. In particular, the following example shows plots of the first derivative, the second derivative and a quantity called the Fisher information, which is the first derivative squared divided by the function value. As usual, the ‘Elt’ kwdarg is used to specify whether we want only the total function (‘T’) or the detail of the list of all the underlying B-splines (‘L’, which is not possible for non-linear operators):

-
ax = BF1.plot(Coefs=Coefs, Deriv='D2', Elt='T',Totdict={'c':'k','lw':2})
-ax = BF1.plot(ax=ax, Coefs=Coefs, Deriv='D1N2', Elt='T',Totdict={'c':'b','lw':2})
-ax.figure.savefig(RP+"/../doc/source/figures_doc/Fig_Tutor_ToFuMesh_BF1_Deriv.png",frameon=None,bbox_inches=0)  # Saving for Tutorial illustration
-
-
-
Figure: Some quantities of interest, based on derivative operators, for the chosen BaseFunc1D object.

-
-

This was done using the ‘Deriv’ kwdarg, which can take several values, as shown in the table below:

The available values of the ‘Deriv’ keyword argument for a BaseFunc1D object:
0, 1, 2, 3 or ‘D0’, ‘D1’, ‘D2’, ‘D3’: Respectively the B-splines themselves (0th order derivative) and their first, second and third order derivatives
‘D0N2’, ‘D1N2’, ‘D2N2’, ‘D3N2’: The squared norm of the 0th, 1st, 2nd and 3rd order derivatives
‘D1FI’: The Fisher information, i.e. the squared norm of the 1st order derivative divided by the function value
-

Keep in mind that we are only using exact derivatives here, so the current version of ToFu_Mesh does not provide discretised operators and you have to make sure that you only compute derivatives for B-splines of sufficiently high degree.

-

Finally, the BaseFunc1D object also comes with methods to compute the value of the integral of the previous operators on the support of the B-spline. When it is possible, another method also returns the matrix that can be used to compute this integral using a vector of coefficients for the B-splines, along with a flag ‘m’ that indicates how the matrix should be used:

-
# Getting integral operators and values
-A, m = BF1.get_IntOp(Deriv='D0')
-Int = BF1.get_IntVal(Coefs=Coefs, Deriv='D0')
-print A.shape, m
-
-
-
print Int
-# (30,) 0
-
-
-

When m==0, A is a vector and the integral is linear in the coefficients (Int=A*Coefs); when m==1, A is a matrix and the integral requires a quadratic operation (Int=Coefs*A*Coefs). A short sketch after the table below illustrates both cases. The following integrals are implemented:

The available values of the ‘Deriv’ keyword argument for integral computation:
0 or ‘D0’: The integrals of the B-splines themselves (0th order derivative; integrals of higher order derivatives are all zero)
‘D0N2’, ‘D1N2’, ‘D2N2’, ‘D3N2’: The integrals of the squared norm of the 0th, 1st, 2nd and 3rd order derivatives (only the 0th order derivative is implemented so far, for Deg=0,1 but not for Deg=2,3)
‘D1FI’: The integrated Fisher information, not implemented so far
-
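A minimal numpy sketch of the two cases described above (the arrays are toy stand-ins for the operator returned by get_IntOp and for the B-spline coefficients, not actual ToFu output):

import numpy as np

Coefs = np.array([0.2, 0.5, 0.3])          # toy coefficient vector
A0 = np.array([1.0, 1.0, 1.0])             # toy operator returned with m == 0 (a vector)
A1 = np.eye(3)                             # toy operator returned with m == 1 (a matrix)

Int0 = np.dot(A0, Coefs)                   # m == 0 : Int = A*Coefs
Int1 = np.dot(Coefs, np.dot(A1, Coefs))    # m == 1 : Int = Coefs*A*Coefs (quadratic form)
print Int0, Int1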

Finally, you can also plot a series of selected basis functions and their associated mesh elements (useful for detailed analysis and for debugging). Note that you can also provide a ‘Coefs’ vector if you do not wish to use the default Coefs=1. value for the representation.

-
ax = BF1.plot_Ind(Ind=[0,5,8], Elt='LCK')
-plt.show()
-
-
-
Figure: Some selected basis functions and their associated mesh centers and knots.

-
-

All these functionalities are also found in the BaseFunc2D object, which additionally provides specific attributes and methods:

The attributes of a BaseFunc2D object:
self.ID: The ID class of the object
self.Mesh: The Mesh2D object on which the basis functions are built
self.LFunc, self.NFunc, self.Deg, self.Bound: The list of basis functions, the number of basis functions (self.NFunc=len(self.LFunc)), the degree of the basis functions, and the boundary condition (only 0 implemented so far, all points have multiplicity 1)
self.Func_Centsind, self.Func_Knotsind, self.Func_PMax: Arrays giving the correspondence index between each basis function and all its associated mesh centers (there are methods to go the other way around), its associated mesh knots, and the position of the maximum of each basis function (either a mesh center or a knot, depending on its degree)
self.FuncInterFunc: An array containing the indices of all neighbouring basis functions of each basis function (neighbouring in the sense that the intersection of their respective supports is non-zero)
-

Due to its 2D nature, the BaseFunc2D object class is also equipped with methods to get the support (self.get_SuppRZ) and quadrature points (self.get_quadPoints) of each basis function.

-

Like the BaseFunc1D object, it provides a method for a least-squares fit of an input function. In the following example, the coefficients are determined using this method and then fed to various plotting methods used to visualise the function itself or some of its derivatives:

-
BF2 = TFM.BaseFunc2D('BF2',M2bis,1)                                                                                                             # Defining the BaseFunc2D object
-"""
-PathFile = RP + '/Inputs/AUG_Tor.txt'
-PolyRef = np.loadtxt(PathFile, dtype='float', comments='#', delimiter=None, converters=None, skiprows=0, usecols=None, unpack=False, ndmin=2)
-Tor2 = TFG.Tor('AUG',PolyRef)                                                                                                                   # Defining the Tor object for the Emiss function definition
-def Emiss(Points):                                                                                                                              # Definition of the inpout Emiss function
-    R  = np.sqrt(Points[0,:]**2+Points[1,:]**2)
-    Z = Points[2,:]
-    Val = np.exp(-(R-1.68)**2/0.20**2 - (Z-0.05)**2/0.35**2) - 0.50*np.exp(-(R-1.65)**2/0.08**2 - (Z-0.05)**2/0.15**2)
-    ind = Tor2.isinside(np.array([R,Z]))
-    Val[~ind] = 0.
-    return Val
-
-ax1, ax2 = BF2.plot_fit(ff=Emiss)                                                                                                               # Plotting the fitted function
-"""
-Coefs, res = 1.,0#BF2.get_Coefs(ff=Emiss)                                                                                                            # Extracting the coefficients corresponding to the fitted function
-f, axarr = plt.subplots(2,4, sharex=True, facecolor="w" ,figsize=(20,13))
-ax = BF2.plot(ax=axarr[0,0], Coefs=Coefs,Deriv='D1', DVect=TFD.BF2_DVect_Def)                                                                   # Plotting the gradient scalar vertical vector (Z-component)
-ax.axis("equal"), ax.set_title("D1-Z")
-ax = BF2.plot(ax=axarr[1,0], Coefs=Coefs,Deriv='D1', DVect=TFD.BF2_DVect_Defbis)                                                                # Plotting the gradient scalar horizontal vector (R-vector)
-ax.axis("equal"), ax.set_title("D1-R")
-ax = BF2.plot(ax=axarr[0,1], Coefs=Coefs,Deriv='D1N2')                                                                                          # Plotting the squared norm of the gradient
-ax.axis("equal"), ax.set_title("D1N2")
-ax = BF2.plot(ax=axarr[1,1], Coefs=Coefs,Deriv='D1FI')                                                                                          # Plotting the local fisher information
-ax.axis("equal"), ax.set_title("D1FI")
-ax = BF2.plot(ax=axarr[0,2], Coefs=Coefs,Deriv='D2Lapl')                                                                                        # Plotting the laplacian
-ax.axis("equal"), ax.set_title("D2Lapl")
-ax = BF2.plot(ax=axarr[1,2], Coefs=Coefs,Deriv='D2LaplN2')                                                                                      # Plotting the squared norm of the laplacian
-ax.axis("equal"), ax.set_title("D2LaplN2")
-ax = BF2.plot(ax=axarr[0,3], Coefs=Coefs,Deriv='D2Gauss')                                                                                       # Plotting the Gaussian curvature of the surface
-ax.axis("equal"), ax.set_title("D2Gauss")
-ax = BF2.plot(ax=axarr[1,3], Coefs=Coefs,Deriv='D2Mean')                                                                                        # Plotting the Mean curvature of the surface
-ax.axis("equal"), ax.set_title("D2Mean")
-plt.show()
-
-
-
Figure: Input 2D emissivity model and fitted BaseFunc2D.

-
-
Figure: Series of derivatives or local quantities of interest of the fitted BaseFunc2D object.

-
-

Like for the BaseFunc1D object, and in order to facilitate detailed analysis and possibly debugging, you can also plot the key points, support and value of some selected basis functions of your choice:

-
ax = BF2.plot_Ind(Ind=[200,201,202, 300,301,302, 622,623,624,625,626, 950], Elt='L', EltM='M', Coefs=Coefs)                                         # Plotting local basis functions values and mesh
-ax = BF2.plot_Ind(Ind=[200,201,202, 300,301,302, 622,623,624,625,626, 950], Elt='SP', EltM='MCK', Coefs=Coefs)                                      # Plotting local basis functions support and PMax and mesh with centers and knots
-plt.show()
-
-
-
Figure: Local values of the selected local basis functions, with the underlying mesh.

-
-
Figure: Support and PMax of the selected local basis functions, with the underlying mesh and the centers and knots associated to the selected local basis functions.

-
-

Finally, you can access values and operators of interest regarding some integrated quantities like the squared norm of the gradient, the squared laplacian (to be finished)...

-
print BF2.Mesh.Surf
-print "Int radiation : ", BF2.get_IntVal(Deriv='D0', Coefs=1.)
-print "Int sq. gradient : ", BF2.get_IntVal(Deriv='D1N2', Coefs=Coefs)
-# 1.69963173663
-# Surf :
-
-
-
# Vol :
-# Surf :
-# Vol :
-
-
-

The following table lists the operators which are available in ToFu_Mesh, depending on the value of the kwdarg ‘Deriv’:

The available values of the ‘Deriv’ keyword argument for integral computation:
0 or ‘D0’: The integrals of the B-splines themselves (integrals of higher order derivatives are all zero)
‘D0N2’, ‘D1N2’, ‘D2N2’, ‘D3N2’: The integrals of the squared norm of the 0th, 1st, 2nd and 3rd order derivatives (only the 0th order derivative is implemented so far, for Deg=0,1 but not for Deg=2,3)
‘D1FI’: The integrated Fisher information, not implemented so far
-
-
- - - - \ No newline at end of file diff --git a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_General.png b/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_General.png deleted file mode 100644 index 4d39be483..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_General.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_SynthDiag.png b/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_SynthDiag.png deleted file mode 100644 index 0687f869b..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_SynthDiag.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_Tomo.png b/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_Tomo.png deleted file mode 100644 index 845d38323..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_BigPicture_Tomo.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_3D.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_3D.png deleted file mode 100644 index 0c2ab9c0a..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_3D.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj.png deleted file mode 100644 index 5af38adf6..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj_Cone.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj_Cone.png deleted file mode 100644 index 348df05b9..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_AllProj_Cone.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS.png deleted file mode 100644 index da43b1f7b..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS_Extend.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS_Extend.png deleted file mode 100644 index b5518f353..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS_Extend.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_Imp.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_Imp.png deleted file mode 100644 index 5768c9d5e..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_Imp.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj.png deleted file mode 100644 index 09a2f608f..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj_NoColis.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj_NoColis.png deleted file mode 100644 index eb02c5a50..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolProj_NoColis.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolSlice.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolSlice.png 
deleted file mode 100644 index 65f7f7d4c..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngPolSlice.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj.png deleted file mode 100644 index 3c4ee34e1..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj_NoColis.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj_NoColis.png deleted file mode 100644 index 3e9da9e86..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorProj_NoColis.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorSlice.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorSlice.png deleted file mode 100644 index ae7050641..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SAngTorSlice.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SolAngPlane.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SolAngPlane.png deleted file mode 100644 index a2c0e4dd7..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SolAngPlane.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SynthDiag.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SynthDiag.png deleted file mode 100644 index 8a6439aa1..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Detect_SynthDiag.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjC.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjC.png deleted file mode 100644 index 517620cde..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjC.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjL.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjL.png deleted file mode 100644 index bfd07f6ed..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProjL.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProj_F019.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProj_F019.png deleted file mode 100644 index 21ebe5ea1..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_AllProj_F019.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Etend.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Etend.png deleted file mode 100644 index 588318b34..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Etend.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Impact.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Impact.png deleted file mode 100644 index d357cd999..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Impact.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig1.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig1.png deleted file mode 100644 index d62ce9def..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig1.png and 
/dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig2.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig2.png deleted file mode 100644 index 54278c3cf..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig2.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig3.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig3.png deleted file mode 100644 index a4638b2d7..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_Sig3.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_SigZ.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_SigZ.png deleted file mode 100644 index a45181c0a..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GDetect_SigZ.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_AllProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_AllProj.png deleted file mode 100644 index 34718d336..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_AllProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_ImpPol.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_ImpPol.png deleted file mode 100644 index c350b3156..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_GLOS_ImpPol.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_3D.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_3D.png deleted file mode 100644 index 55f390650..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_3D.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_AllProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_AllProj.png deleted file mode 100644 index 784dd1e12..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_AllProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_ImpPol.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_ImpPol.png deleted file mode 100644 index 6dae42ef1..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_LOS_ImpPol.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_3D.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_3D.png deleted file mode 100644 index 8b197c95c..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_3D.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProj.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProj.png deleted file mode 100644 index 6e6096983..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProj.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProjAndVect.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProjAndVect.png deleted file mode 100644 index 8a9376f0a..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_AllProjAndVect.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp.png deleted file mode 100644 index 9fc2c2eef..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp.png and /dev/null differ diff --git 
a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp3.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp3.png deleted file mode 100644 index 256463101..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuGeom_Tor_Imp3.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_BF.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_BF.png deleted file mode 100644 index a698bdb11..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_BF.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Detect.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Detect.png deleted file mode 100644 index 727ca2cff..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Detect.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sig.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sig.png deleted file mode 100644 index 0d6a6243a..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sig.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sum.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sum.png deleted file mode 100644 index 85c4049e6..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM0_Sum.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_BF.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_BF.png deleted file mode 100644 index f98a5c0aa..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_BF.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Detect.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Detect.png deleted file mode 100644 index fc1b7d0fb..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Detect.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sig.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sig.png deleted file mode 100644 index b8fc9b6c9..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sig.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sum.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sum.png deleted file mode 100644 index e10aac2e6..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM1_Sum.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM2_Sig.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM2_Sig.png deleted file mode 100644 index c9ddff45c..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMatComp_GM2_Sig.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1.png deleted file mode 100644 index f4ffaf090..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Deriv.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Deriv.png deleted file mode 100644 index f48369b6b..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Deriv.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Select.png 
b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Select.png deleted file mode 100644 index 0b28cb398..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF1_Select.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2.png deleted file mode 100644 index 62714ade9..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Deriv.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Deriv.png deleted file mode 100644 index 2bb83e065..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Deriv.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int1.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int1.png deleted file mode 100644 index e58f912b8..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int1.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int2.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int2.png deleted file mode 100644 index fea6cd966..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_BF2_Int2.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1.png deleted file mode 100644 index 4623abcba..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1_Res.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1_Res.png deleted file mode 100644 index 73b3e7f85..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M1_Res.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2.png deleted file mode 100644 index 0101de3e7..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Cents.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Cents.png deleted file mode 100644 index 09f4f5496..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Cents.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Knots.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Knots.png deleted file mode 100644 index 70725269d..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Knots.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Raw.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Raw.png deleted file mode 100644 index f6fd9d538..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Raw.png and /dev/null differ diff --git a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Res.png b/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Res.png deleted file mode 100644 index e22e9e13b..000000000 Binary files a/_Old_doc/build/html/_images/Fig_Tutor_ToFuMesh_M2_Res.png and /dev/null differ diff --git a/_Old_doc/build/html/_sources/ToFu_Geom.txt b/_Old_doc/build/html/_sources/ToFu_Geom.txt deleted file mode 100644 index 59f6ac103..000000000 --- a/_Old_doc/build/html/_sources/ToFu_Geom.txt +++ /dev/null @@ -1,845 +0,0 @@ -.. role:: envvar(literal) -.. role:: command(literal) -.. 
role:: file(literal) -.. role:: ref(title-reference) - - -**ToFu_Geom** -============= - -(This project is not finalised yet, work in progress...) - -**ToFu_Geom** is the first ToFu-specific module; it is dedicated to handling the 3D geometry of the diagnostic of interest. It defines seven object classes and many functions used as object methods. It resorts to a module called **General_Geom**, which is not ToFu-specific (i.e.: it mostly contains functions and has no reference to ToFu objects) and which should eventually be entirely re-written using Cython for faster computation. Like all the other ToFu-specific modules, **ToFu_Geom** not only defines computing methods but also a variety of plotting methods that can be used to visualise various aspects and characteristics of the diagnostics, as well as for debugging. -This section will first give a general presentation of the **ToFu_Geom** module and will then give a tutorial for building your own diagnostic. - -**ToFu** is designed for handling passive radiation detectors (e.g.: bolometer foils, semi-conductor diodes or gas detectors), which can be placed behind an arbitrary number of collimating apertures of any shape and orientation. The same goes for the detector, represented by its active surface (the only constraint for apertures and detectors - in the current version - is that each must be represented by a planar polygon, but they do not have to be co-planar). Each detector is thus associated to a list of apertures through which it "sees" a certain volume. The volume of interest is limited, in the case of a Tokamak, to a chamber (i.e.: the vacuum vessel) represented in **ToFu** by a toroid, itself defined by a reference 2D polygon (usually the best possible representation of the inner walls of the Tokamak) which is then expanded toroidally. The volume "seen" by each detector is then the fraction of the toroid that it can "see" directly through its various apertures. On most fusion devices, such passive radiation detectors are located in a poloidal cross-section and arranged so that their cone of vision is very thin, such that it can be represented by a simple line (called a Line Of Sight, or LOS) and an etendue. **ToFu_Geom** allows for a full 3D description of the whole system, and also for an accurate computation of the geometrically optimal LOS and its associated etendue value. Hence, it is possible to do everything with the two approaches (full 3D or LOS) and quantify the error due to the LOS approximation, if any. - -This short introduction gives the key points addressed by **ToFu_Geom**, which can be summarized by listing the seven object classes and their meaning: - -.. list-table:: The object classes in **ToFu_Geom** - :widths: 10 30 20 - :header-rows: 1 - - * - Name - - Description - - Inputs needed - * - ID - - An identity object that is used by all **ToFu** objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...) - - By default only a name (a character string) is necessary. A default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attributes can be added to suit the specific needs of the data acquisition system of each fusion experiment or the naming conventions of each laboratory.
- * - Tor - - The limits of the toroidal chamber - - A 2D polygon in (R,Z) coordinates - * - LOS - - A LOS; it can be defined by the user for tests, but is usually defined by the Detect object as an output - - A Tor object, a starting point and a unitary vector indicating the direction of observation (the end point is computed), both in 3D (X,Y,Z) coordinates - * - GLOS - - A group of LOS objects, with a name (useful for defining cameras, which are sets of detectors with a common aperture and a common name) - - A list of LOS objects - * - Aperture - - An aperture, represented by a planar polygon - - A Tor object and a planar polygon in 3D (X,Y,Z) coordinates - * - Detect - - A detector, represented by its planar active surface; computes a geometrically optimal LOS as an output - - A Tor object, a planar polygon in 3D (X,Y,Z) coordinates, and a list of Aperture objects - * - GDetect - - A group of Detect objects, useful for defining cameras - - A list of Detect objects - -The following will give a more detailed description of each object and its attributes and methods, through a tutorial at the end of which you should be able to create your own diagnostics and access their main geometrical characteristics (they will be computed automatically). - -Getting started with ToFu_Geom ------------------------------- - -Once you have downloaded the whole **ToFu** package (and made sure you also have scipy, numpy and matplotlib, as well as a free polygon-handling library called **Polygon**, which can be downloaded at http://www.j-raedler.de/projects/polygon/), just start a python interpreter and import **ToFu_Geom** (we will always import **ToFu** modules 'as' a short name to keep track of the functionalities of each module). To handle the local path of your computer, we will also import the small module called **ToFu_PathFile**, and **matplotlib** and **numpy** will also be useful: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 7-12 - -The os module is used for exploring directories and the cPickle module for saving and loading objects. - -The Tor object class -------------------- - -To define the volume of the vacuum chamber, you need to know the (R,Z) coordinates of its reference polygon (in a poloidal cross-section). You should provide it as a (2,N) numpy array, where N is the number of points defining the polygon. To give the Tor object its own identity you should at least choose a name (i.e.: a character string). For more elaborate identification, you can define an ID object and give it as an input instead of a simple name. You can also provide the position of a "center" of the poloidal cross-section (in 2D (R,Z) coordinates as a (2,1) numpy array) that will be used to compute the coordinates in projection space of any LOS using this Tor object (and the sinogram of any scalar emissivity field using this Tor object). If not provided, the center of mass of the reference polygon is used as a default "center". - -In the following, we will use the geometry of ASDEX Upgrade as an example. -We first have to give a reference polygon ('PolyRef' below) as a (2,N) numpy array in (R,Z) coordinates. - ..
literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 21-26 - - -Alternatively, you can store PolyRef in a file and save this file locally, or use one of the default tokamak geometries stored in the **ToFu** database, where Tor input polygons are stored in two-line .txt files (space-separated values of the R coordinates on the first line, and corresponding Z coordinates on the second line). Here, we use the default ASDEX Upgrade reference polygon stored in AUG_Tor.txt. - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 29-33 - -We have now created two Tor objects, and **ToFu_Geom** has computed a series of geometrical characteristics that will be useful later (or that simply provide general information). -In particular, we have access to the following attributes: - -.. list-table:: The attributes of a Tor object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the Tor object - * - self.Poly - - The reference polygon used to create the Tor object, as a (2,N) numpy array, where N is the number of points (the last one being identical to the first one) - * - self.BaryP - - The barycenter of self.Poly - * - self.Surf - - The surface of self.Poly - * - self.BaryS - - The center of mass of self.Poly - * - self.Vect - - The 2D vectors representing the edges of self.Poly as a (2,N) numpy array - * - self.Vin - - The normalised 2D vectors oriented towards the inside of self.Poly for each edge - * - self.PRMin, self.PRMax - - The points of self.Poly with the minimum (resp. maximum) R coordinate, as a (2,1) numpy array (one for PRMin, one for PRMax) - * - self.PZMin, self.PZMax - - The points of self.Poly with the minimum (resp. maximum) Z coordinate, as a (2,1) numpy array (one for PZMin, one for PZMax) - * - self.ImpRZ - - The (R,Z) coordinates of the point used for computing the impact factor (i.e. the coordinates in projection space) of the LOS objects using this Tor and of the envelope of this Tor (default is self.BaryS) - * - self.Imp_EnvTheta - - The discretized values used for computing the envelope of Tor in projection space (where theta is in [0,pi]) - * - self.Imp_EnvMinMax - - The envelope of Tor in projection space (i.e.: the - algebraic - minimum and maximum impact factor of the reference polygon for each value of self.Imp_EnvTheta) - - -In addition to these attributes, the Tor object has a number of built-in methods that can be used to visualise its characteristics. As in the whole **ToFu** package, the object methods used for plotting always begin with "self.plot...", where the name of the method after "plot..." is relatively explicit. All the plotting methods are based on matplotlib, and in order to allow for flexibility and customization, you can either pass as input an already existing matplotlib axes on which to plot, or use a predefined default axes (simply by not specifying any axes). Similarly, extensive use of keyword arguments with default values is made, thus all plotting options are customizable since you can pass a dictionary for each element to be plotted (see the detailed documentation of each method to know which kwarg to use for which element). - -As an example, you can plot the reference polygon of ASDEX Upgrade in both a poloidal and a toroidal projection, using the default axes (defined in **ToFu_Geom**): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 36,38 - ..
figure:: /figures_doc/Fig_Tutor_ToFuGeom_Tor_AllProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of the reference polygon of ASDEX Upgrade - :align: center - - Poloidal and toroidal projections of the reference polygon of ASDEX Upgrade - -Here we used the keyword argument 'Elt' to specify which elements we wanted to plot. We provided a string in which each letter is a code for an element: here 'P' stands for the reference polygon and 'I' for the point used for computing the impact parameter of the envelope. -We can then re-use the axes of the poloidal projection to plot the vectors defining the edges and the inner side of the reference polygon: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 40,42 - -(for some mysterious reason it is not working on my Linux station, but it does work on my macbook, as it should) - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Tor_AllProjAndVect.png - :height: 400px - :width: 800 px - :scale: 100% - :alt: Poloidal projection of the vectorised reference polygon of ASDEX Upgrade - :align: center - - Vector representation of the reference polygon of ASDEX Upgrade - -We can also plot a 3D representation of the reference polygon, and specify that we only want to plot a fraction of it, between pi/4 and 7pi/4: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 44,46 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Tor_3D.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: 3D fraction of the reference polygon of ASDEX Upgrade - :align: center - - 3D representation of the reference polygon of ASDEX Upgrade - -We can also visualise the envelope of ASDEX Upgrade in projection space, in 2D or 3D, with a color of our choosing: - - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 48,49,52 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Tor_Imp.png - :height: 400px - :width: 800 px - :scale: 80 % - :alt: Representation in projection space of the reference polygon of ASDEX Upgrade - :align: center - - Representation in projection space of the reference polygon of ASDEX Upgrade - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Tor_Imp3.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Representation in projection space of the reference polygon of ASDEX Upgrade in 3D - :align: center - - Representation in projection space of the reference polygon of ASDEX Upgrade in 3D; this representation may be useful when there are LOS which are not contained in a poloidal cross-section, as we will see later - -Feel free to explore the various keyword arguments of each method. -This Tor object can then be used as a limit to the volume that can be detected by each LOS or Detect object. - - -The LOS object class -------------------- - -Since most tomography users in the fusion community are familiar with the LOS approximation (which gives satisfactory results in most usual situations), we chose to provide in **ToFu** the two extremes of the spectrum: a pure LOS approximation, and a full 3D approach. Any attempt to compute the geometry matrix with an "advanced" or "improved" LOS approximation (i.e.: taking into account finite beam width, using anti-aliasing techniques with pixels...) can be considered to fall somewhere between these two extremes, and since every user has his own recipes, we do not provide any except the two extreme approaches.
Obviously, all users can download **ToFu** and add their own recipe in their local version (this should be done in the **ToFu_MatComp** module). Hence, a pure LOS object exists in **ToFu**, and can be defined with minimum knowledge of the diagnostics: only a point (D) and a unitary vector (u) are necessary for each LOS. The unitary vector must point in the direction of observation (i.e.: towards the interior of the vacuum chamber). -Once a LOS is defined, **ToFu** automatically computes a series of points of interest. Indeed, if a Tor object is provided to the LOS object, we can determine the first point of entry into the Tor volume (PIn), and the point where the LOS gets out of it (POut). We can also determine the point on the LOS with minimum R-coordinate (PRMin, which is usually PIn or POut except when the LOS has a strong toroidal inclination, in which case PRMin is somewhere in the middle of the LOS). If the LOS object has an ImpRZ (by default the ImpRZ of the associated Tor object), then the impact parameter of the LOS with respect to this ImpRZ can be computed (as well as its two associated angles), and the LOS can be represented in projection space. - -Hence, a LOS object has the following attributes: - -.. list-table:: The attributes of a LOS object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.D - - The starting point of the LOS in 3D (X,Y,Z) coordinates (usually the center of mass of the associated detector or the center of mass of the collimating slit for fan-like cameras) - * - self.u - - The unitary direction vector in 3D (X,Y,Z) coordinates (oriented towards the interior of the associated Tor) - * - self.Tor - - The associated Tor object - * - self.PIn, self.POut, self.PRMin - - The particular points, in 3D (X,Y,Z) coordinates, on the LOS associated to the Tor envelope: the point at which the LOS enters the volume, the point at which it exits it, and the point of minimum R-coordinate - * - self.kPIn, self.kPOut, self.kPRMin - - The length on the LOS (from self.D) of self.PIn, self.POut and self.PRMin - * - self.RMin - - The value of the R-coordinate of self.PRMin - * - self.PolProjAng - - An estimate of the angle by which the LOS is distorted in its poloidal projection. A value of 0 means the LOS is already in a poloidal cross-section and remains a straight line. - * - self.PplotOut, self.PplotIn - - The points along the LOS used to plot its poloidal projection, either the whole LOS (self.PplotOut, from self.D to self.POut) or only the part which is inside the Tor volume (self.PplotIn, from self.PIn to self.POut) - * - self.ImpRZ - - The 2D (R,Z) coordinates used to compute the impact factor of the LOS (i.e. its coordinates in projection space), by default self.ImpRZ = self.Tor.ImpRZ - * - self.ImpP, self.ImpPk, self.ImpPr, self.ImpPTheta - - The point on the LOS which is closest to self.ImpRZ (the "impact point") is self.ImpP, and its distance from self.D is self.ImpPk (i.e.: (self.ImpP-self.D).(self.u) = self.ImpPk). Its small (geometric) radius from self.ImpRZ is self.ImpPr and its toroidal angle is self.ImpPTheta - * - self.Impp, self.Imptheta, self.Impphi - - The coordinates of the LOS in projection space, where self.Impp is the (positive or negative) impact parameter, self.Imptheta is the projection angle in a poloidal cross-section and self.Impphi is the deviation angle from the poloidal cross-section (the reference poloidal cross-section being the one which includes self.ImpP).
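To make the last three attributes more concrete, the following sketch shows the purely in-plane version of the computation, for a line lying in a poloidal cross-section. This is an illustration only: the actual ToFu routines work in 3D, handle the sign convention of the impact parameter and also return the deviation angle self.Impphi.

.. code-block:: python

   import numpy as np

   def impact_coordinates(D, u, C):
       """Impact parameter and angle of a straight line in a poloidal (R,Z) plane.

       D : (2,) point on the line, u : (2,) unit direction vector,
       C : (2,) reference point (the equivalent of self.ImpRZ).
       """
       k = np.dot(C - D, u)             # abscissa along the line of the closest point
       P = D + k*u                      # closest point to C (the equivalent of self.ImpP)
       r = P - C
       p = np.hypot(r[0], r[1])         # impact parameter (unsigned here; cf. self.Impp)
       theta = np.arctan2(r[1], r[0])   # projection angle (cf. self.Imptheta)
       return p, theta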
- - -Defining a LOS object only requires a start point and a unitary vector indicating the viewing direction (both in 3D (X,Y,Z) coordinates), as well as an associated Tor object. -As an example, we can define Los, a LOS object, as follows: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 56-60 - -Note that if you define a LOS object that does not intersect the Tor volume, you will get an error message telling you that the code could not find a PIn or POut point (both are necessary). -All the geometric characteristics of Los have now been computed (the coordinates in projection space have been computed using the center of mass of the reference polygon of Tor2 as default, but they can be re-computed with another reference point, as we will see later). The built-in routines can be used to visualise Los, and we specify, thanks to the 'Elt' keyword argument, that we not only want to see the LOS itself but also the position of the particular points that were computed or that were used for its definition (self.D, self.PIn, self.POut and self.PRMin => DIOR). In order to better visualise it, we plot it both in poloidal and toroidal projections, re-using a set of axes on which we first plot Tor2: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 62-63,65 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_LOS_AllProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of a user-defined LOS object, with points of interest and parent Tor object - :align: center - - Poloidal and toroidal projections of a user-defined LOS object, with points of interest and parent Tor object - -We again used the 'Elt' keyword argument to specify that we want to plot the LOS itself ('L'), the particular points self.D ('D'), self.PIn ('I'), self.POut ('O'), self.PRMin ('R') and self.ImpP ('r'). -In fact, since the LOS object has a Tor attribute, the possibility of plotting the Tor object at the same time as the LOS object is provided in the same method, through the kwdarg 'EltTor' (just provide the same letters as for a Tor object). - -Like the Tor object, the LOS object can also be plotted in 3D using the plot_3D_plt() method: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 67,69 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_LOS_3D.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: 3D plot of a user-defined LOS object, with points of interest and parent Tor object - :align: center - - 3D plot of a user-defined LOS object, with points of interest and parent Tor object - -Here we have plotted the associated Tor object using the kwdarg 'EltTor' and changed the dictionaries for the self.PRMin and self.ImpP points. -Generally, though, matplotlib is not the best library for 3D plots with several objects; hence, mayavi is currently being considered for implementation since it is much better adapted to this particular task. - -Also, the coordinates of Los in projection space can be plotted on the same graph as the envelope of Tor2, in 2D or 3D (3D being relevant to take into account the fact that Los does not lie in a poloidal cross-section). Beware that these coordinates depend on the reference point chosen. To illustrate this, we compute the impact parameter of Los with the default reference point (i.e.: the center of mass of its associated Tor object) in blue and with a different, arbitrary, reference point in red: - ..
literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 71-76,78 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_LOS_ImpPol.png - :height: 400px - :width: 800 px - :scale: 80 % - :alt: Coordinates in projection space of Los, with respect to two different reference points - :align: center - - Coordinates in projection space of Los, with respect to two different reference points - -N.B.: In fact the envelope of Tor2 also changes when we change the reference point, but only the first envelope is displayed here for clarity. -Now we know how to build a LOS object, get its main geometrical characteristics and plot it. But most tomography diagnostics rely on tens or hundreds of different LOS which, in the fusion community, are usually grouped in what are called "cameras". A "camera" is typically a set of several detectors which share a common aperture in a fan-like arrangement, which is a good compromise between room saving (access is scarce around Tokamaks) and good geometrical coverage. Hence, a LOS can be defined for each detector as the line that runs through its center of mass and through the center of mass of its associated aperture. The fan-like arrangement means that all LOS belonging to the same camera will cross at their common aperture. To this purely geometrical consideration, one must add the data acquisition system, which often treats data from a camera as a group of signals identified by a common nomenclature. -For these reasons, it is useful to define an object embodying the notion of "camera", which is simply done by the GLOS object class. - - - -ToDo: add visualisation options for the reference points and LOS.ImpP in physical space (add kwdarg in existing functions) - - -The GLOS object class --------------------- - -The GLOS object class (where GLOS stands for Group of Lines Of Sight) is simply a list of LOS objects with a common ID class (i.e.: a common name and other identity features). It is useful for fast and easy handling of a large number of LOS. - -.. list-table:: The attributes of a GLOS object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.LLOS, self.nLOS - - The list of LOS objects contained in this group, and the number of LOS (self.nLOS = len(self.LLOS)) - * - self.Tor - - The Tor object common to all LOS of self.LLOS - -The methods of a GLOS object can be separated into two categories. First, all the LOS object methods are reproduced in such a way as to handle all the LOS contained in the GLOS (for example with "for" loops). These include in particular the plotting methods. Second, some methods are provided to facilitate the selection of sub-sets of LOS in the GLOS object and handle them. -For example, we can create two cameras of respectively 10 and 15 LOS: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 81-90 - -We can then plot their poloidal and toroidal projections (without the particular points), on top of the reference polygon of Tor2: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 93-94,96 - ..
figure:: figures_doc/Fig_Tutor_ToFuGeom_GLOS_AllProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of two arbitrary cameras, with different plotting options - :align: center - - Poloidal and toroidal projections of two arbitrary cameras, with different plotting options - -Notice here that we used the keyword argument "LPlot" to specify that the LOS of the first camera should only be plotted inside the Tor volume (i.e.: from PIn to POut), whereas the default is to plot the entire LOS (Lplot='Tot', which plots the LOS from D to POut). We also used the "Ldict" kwdarg to specify a dictionary for the plotting command. - -Like the LOS objects, a GLOS object enables you to plot the coordinates in projection space of all the LOS it contains (in 2D or 3D): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 99-101,103 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GLOS_ImpPol.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Coordinates in projection space of the two cameras, with the Tor envelope - :align: center - - Coordinates in projection space of the two cameras, with the Tor envelope - -In addition to these methods, which echo the methods of the LOS class, the GLOS class offers tools to select subsets of the list of LOS from which it was created. -This is where the relevance of the ID class starts to show: indeed, besides the Name that you gave to your objects, you might want to store data which is both specific to these objects and to your needs or to the naming conventions of your laboratory. For example, you might want to enter the signal code associated to each detector, or the age of each detector (to have an idea of the effect of ageing on its performance)... Hence, when you create an object - like a LOS for example - you can add extra attributes to its ID class. These attributes are anything that you consider helpful to identify / discriminate a particular object. In the following example, we re-create a camera, but we add the signal code ("Signal" + number of the LOS) and age (between 0 and 3 years) of each detector to the ID class of its LOS: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 106-108 - -We can now ask the GLOS object to give us a list of its LOS that match a criterion of our choosing. -There are two methods to do this. They take the same arguments, but the first one will return a numpy array of boolean indices (for later use if you need it), while the second one directly returns a list of LOS objects (and uses the first one). For example, we use the first one to get the indices of LOS with a signal code equal to "Signal0" or "Signal50", and the second one to get a list of LOS aged less than a year: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 109-111 - -The flexibility is provided through the use of eval(), which allows for string expressions. -These methods are intended to provide the necessary flexibility for quick adaptation to your specific needs. Depending on user feedback, they may also evolve or be developed further. -Alternatively, you can also build a list of the attributes you are interested in and then use the list() methods to get the indices you want: - ..
literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 113-115 - -As was already said, **ToFu** provides you with the possibility of defining and using LOS if you wish; however, its main features reside in the 3D description of the diagnostic, of which the LOS description is just an approximation (which can be sufficient for your needs, depending on the geometry of your system, on the physics at play and on the accuracy that you want to achieve). -In the following, we introduce the Detect class, which is used to handle the 3D geometry of the problem. Once a Detect object is created, it can be associated to several Aperture objects to determine its 3D field of view. Consequently, the geometrically optimal LOS can also be computed and the associated LOS object can be easily produced on demand; we therefore generally advise to directly create Detect objects, of which LOS objects can be seen as a by-product. - - -The Detect and Apert object classes ----------------------------------- - -In addition to what has been said above, directly creating a Detect object instead of a LOS object will provide you with the ability to compute an accurate value of the etendue associated to the LOS approximation (link to definition of etendue and why it is important for proper use of the LOS approximation). -In its current version, **ToFu** handles apertures as 3D planar polygons which, to this day, have the following attributes: - -.. list-table:: The attributes of an Apert object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.Poly, self.PolyN - - A (3,N) numpy array representing a planar polygon in 3D (X,Y,Z) coordinates, and the number of points that this polygon is comprised of. - * - self.BaryP, self.BaryS, self.S, self.nIn - - The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume. - * - self.Tor - - The Tor object associated to the Apert object - -The Apert object is mainly used as a computing intermediate for the Detect object. However, it does come along with some key plotting methods aimed at giving you an idea of its geometry in the usual projections (poloidal and toroidal) and in 3D. - -Similarly, **ToFu** handles detectors as 3D planar polygons (i.e.: the polygon embodying the active surface of the detector) which, to this day, have the following attributes: - -.. list-table:: The attributes of a Detect object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.Poly, self.PolyN - - A (3,N) numpy array representing a planar polygon in 3D (X,Y,Z) coordinates, and the number of points that this polygon is comprised of. - * - self.BaryP, self.BaryS, self.S, self.nIn - - The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume - * - self.Tor - - The Tor object associated to the Detect object - * - self.LApert - - A list of Apert objects associated to the Detect object - * - self.LOS - - A LOS object corresponding to the geometrically optimal LOS going through self.BaryS and through the center of mass of the intersection of all the associated Apert objects.
Its value is 'Impossible' if the geometry you built does not allow for the existence of a LOS (i.e.: if the volume inside Tor viewed by the detector through its apertures is zero). - * - self.LOS_TorAngRef - - The value of toroidal angle corresponding to the position of the middle of the LOS (between self.LOS.PIn and self.LOS.POut), used by plotting methods, can be different from the toroidal angle of the detector if the LOS does not stand in a poloidal cross-section. - * - self.LOS_Etend_0Approx, self.LOS_Etend_0ApproxRev, self.LOS_Etend_PerpSamp, self.LOS_Etend_Perp, self.LOS_Etend_RelErr - - Values of the etendue, computed respectively using a fast 0th order approximation, a 0th order approximation reversed, a sampled integral in a plane perpendicular to the LOS, an adaptative integral in a plane perpendicular to the LOS. The last attribute is the relative error tolerance used for the adaptative computation of the integral (default is 0.01 %). - * - self.Span_R, self.Span_Theta, self.Span_Z, self.Span_k - - The tuples indicating the min and max values of the cylindrical (R,Theta,Z) coordinates inside which the viewing cone of the Detect object can be found. These are limits that define a box inside which the viewing cone is found, they do not give the viewing come itself. The Span_k attribute corresponds to the span of the component along self.LOS.u that can be reached inside the viewing cone (estimated by sampling the viewing cone into more than 1000 LOS - the exact number depends on self.Poly and on the shapes of the apertures and can be tuned by parameters). - * - self.Span_NEdge, self.Span_NRad - - The parameters that were used for computing the span in cylindrical coordinates of the system. The first one quantifies the number of extra points added on the polygon edges, and the second one the number of extra points added in the radial direction. - * - self.Cone_PolyPol, self.Cone_PolyTor - - The poloidal and toroidal projectiosn of the 3D viewing cone of the {detector+apertures} system. These projected polygons are useful for visualising the detected volume (or rather its projections) and for fast discrimination of points which are inside / outside of this detected volume (i.e.: fast computation of integrated signal) - -Now we are going to create two arbitrary Apert objects and one Detect object to show how it is done and what information it gives access to. -As already mentioned, the various Apert objects associated to a Detect object must be planar polygons, but they do not need to be coplanar, and they can have any arbitrary shape, hence: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 118-136 - -Note that the computation may take some time (several seconds) due to the accurate computation of the etendue. If you do not need the etendue, you can avoid its computation using the kwdarg 'CalcEtend'=False (default value is True). -Once we can check that the constructed geometry is relevant (i.e.: that it allows for a non-zero detected volume, which means that a LOS should exist), we can plot the associated Detect elements and LOS: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 139,143 - -.. 
figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_AllProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of a Detect object with all its associated objects (2 Apert objects, a Tor object, and a subsequent LOS object) - :align: center - - Poloidal and toroidal projections of a Detect object with all its associated objects (2 Apert objects, a Tor object, and a subsequent LOS object) - -As said earlier, the three polygons do not have to be coplanar, as is visible on the next figure on which we only plotted the two Apert objects and the Detect object (with their perpendicular vectors), as well as the start point of the LOS and its entry point into the Tor volume (in blue): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 140,143 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_3D.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: 3D plot of an arbitrary Detect object with two non-coplanar Apert objects - :align: center - - 3D plot of an arbitrary Detect object with two non-coplanar Apert objects - -Since the Detect object has a LOS object as an attribute, all the LOS methods are accessible via this LOS attribute, making it easy to plot the coordinates in projection space of the LOS of this particular Detect object. - -We saw that the etendue is computed automatically when the Detect object is defined. This is done via numerical integration, on a plane perpendicular to the goemetrically optimal LOS, of the solid angle subtended by the Detect and its Apert objects. **ToFu_Geom** thus has built-in routines to compute that solid angle from any point in the 3D Tor volume. This will also be useful to compute the total signal received by the detector from a given radiation field. -Of course, when taken on a plane perpendicular to the geometrically optimal LOS, the solid angle decreases as we get to the edge of the viewing cone. You can visualise the solid angle on any plane perpendicular to the LOS simply by choosing its relative position on the LOS via the 'Ra' kwdarg of the following method (0 and 1 corresponding respectively to the PIn and POut points of the LOS): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 145,147 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SolAngPlane.png - :height: 500px - :width: 800 px - :scale: 100 % - :alt: Surface plot of the solid angle subtended by the {Detector + Apertures} system as seen from points on a plane perpendicular to the system's LOS and placed at mid-length of the LOS - :align: center - - Surface plot of the solid angle subtended by the {Detector + Apertures} system as seen from points on a plane perpendicular to the system's LOS and placed at mid-length of the LOS - -The value in parenthesis in the title is a ratio (here 1 %) used to plot make sure the plot includes the entirety of the viewing cone in this plane (i.e.: the plotting surface is 1 % larger than the estimated support of the viewing cone). The reason why this surface has no easily reckognisable shape is due to the fact that it comes from a system consisting of 3 non-coplanar polygons with various shapes. If we had used a square detector with a coplanar square aperture, the square shape would have been visible on the iso-contours of the solid angle. 
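The etendue at a given position along the LOS is precisely this solid angle integrated over the perpendicular plane. The sketch below illustrates the kind of sampled integration described above; it is an illustration only, and solid_angle_at() is a stand-in for the ToFu routine that computes the solid angle subtended by the {Detector + Apertures} system as seen from a 3D point (the grid extent and resolution are arbitrary):

.. code-block:: python

   import numpy as np

   def etendue_on_perp_plane(solid_angle_at, P0, e1, e2, half_width=0.05, n=101):
       """Sampled integral of the solid angle over a plane perpendicular to the LOS.

       P0     : (3,) point on the LOS where the plane is taken
       e1, e2 : (3,) orthonormal vectors spanning the plane
       solid_angle_at : callable returning the solid angle seen from a 3D point
       """
       x = np.linspace(-half_width, half_width, n)
       dS = (x[1] - x[0])**2                     # elementary surface of the sampling grid
       etend = 0.
       for x1 in x:
           for x2 in x:
               etend += solid_angle_at(P0 + x1*e1 + x2*e2) * dS
       return etend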
- -Similarly, it is possible to simply plot the evolution of the etendue (the solid angle integrated on the plane) as a function of the point's distance along the LOS (to check that it remains constant), using three different integration methods (two via discretisation and one via an adaptive algorithm); this may take a while because the etendue has to be computed 3xN times (3 times for each point): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 150,152 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Etendue of the {Detector + Apertures} system as a function of the relative distance along the LOS (from 0 = self.LOS.PIn to 1 = self.LOS.POut), with three different integration methods using their default settings - :align: center - - Etendue of the {Detector + Apertures} system as a function of the relative distance along the LOS (from 0 = self.LOS.PIn to 1 = self.LOS.POut), with three different integration methods using their default settings - -We can see that the default settings used for each method are sufficient to give an accurate computation of the etendue, which remains constant along the LOS, as it should. - -Now, in order to explore the geometry of the system further, we can plot the value of the solid angle in any poloidal plane (respectively horizontal plane) intersecting the viewing cone, and thus visualise the viewing cone: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 155-158,161 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolSlice.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can "see" each point of the same poloidal slice (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can "see" each point of the same poloidal slice (this will be useful for systems with several detectors) - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorSlice.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can "see" each point of the same horizontal slice (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system (Right) Number of detectors that can "see" each point of the same horizontal slice (this will be useful for systems with several detectors) - -By default the poloidal slice is the plane which intersects the LOS at mid-length, but you can choose any toroidal angle by using the "Theta" kwdarg. -Note that the above plots are poloidal (resp. horizontal) *slices*, not *projections*. In its current version, **ToFu_Geom** only allows plotting *projections* by computing the solid angle for several discrete *slices* (25 by default, plus particular slices including self.LOS.PIn, self.LOS.POut and the mid-length point) close to each other and plotting the maximum value for each point (computation is very long in the current, non-optimised, python-only version, typically 20-30 min for 10 slices): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 164-167,170 - ..
figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorProj.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - -Notice that there is a collision-detection routine in the ray tracing code that takes into account the fact that the viewing cone is limited by the Tor instance. This caveat can be de-activated by using the "Colis" kwdarg (=True by default), as illustrated in the following: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 173-176,179 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngPolProj_NoColis.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection with de-activated collision detection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a poloidal projection with de-activated collision detection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SAngTorProj_NoColis.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection with de-activated collision detection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - :align: center - - (Left) Contour plot of the solid angle subtended by the {Detector+Apertures} system in a horizontal - or toroidal - projection with de-activated collision detection (Right) Number of detectors that can "see" each point (this will be useful for systems with several detectors) - -These plotting commands give a pretty good idea of the fraction of the Tor volume which is seen by the detector through its associated apertures. -It is actually these functionalities (plotting poloidal and toroidal projections of the solid angle) that are used to extract the poloidal and toroidal projections of the viewing cone as two sets of 2D polygons (i.e.: the 0 iso-contours of the solid angle projections). These two projected polygons can be simply plotted by adding 'C' (like 'cone') to the "Elt" kwdarg of the plot_PolProj and plot_TorProj plotting methods: - -.. 
literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 182,184 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_AllProj_Cone.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of Detect elements, this time including the projected polygons of the viewing cone - :align: center - - Poloidal and toroidal projections of Detect elements, this time including the projected polygons of the viewing cone - -If you want to use the LOS approximation, you have to make sure it is valid. This approximation relies on several assumptions, one of which is that the etendue must remain constant along the LOS. We confirmed this in our case when we plotted it. However, we did not take into account the fact that only a fraction of the viewing cone stops where the LOS stops, and that the other fraction continues its way into the vacuum chamber. This means that there will be contributions to the signal which are not taken into account by the current LOS. An option could be to artificially extend the LOS through the central solenoid to the far end of the viewing cone, but this would still be insufficient since the etendue that should be used for this extended part of the LOS is lower than the one we computed for the first part of the LOS. -This type of situation, in which a fraction of the viewing cone is obstructed, corresponds to cases in which the etendue is in fact not constant along the entirety of the *extended* LOS (i.e.: extended to the far end of the viewing cone), as illustrated below. It reveals the limits of the LOS approximation and the advantages of a 3D description of the geometry. - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 187-189,191 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_EtendAlongLOS_Extend.png - :height: 500px - :width: 700 px - :scale: 100 % - :alt: Etendue of the {Detector + Apertures} system as a function of the relative distance along the *extended* LOS, with and without taking into account collisions, and along the former LOS. - :align: center - - Etendue of the {Detector + Apertures} system as a function of the relative distance along the *extended* LOS, with and without taking into account collisions, and along the former LOS. - -In addition to this effect, it is also possible to visualise the difference between the LOS approximation and the real viewing cone by plotting the contour of the viewed volume in projection space, as illustrated below: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 195,197 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_Imp.png - :height: 500px - :width: 700 px - :scale: 100 % - :alt: Contour in projection space of the volume viewed by the {Detector + Apertures} system, compared to the coordinates of its LOS - :align: center - - Contour in projection space of the volume viewed by the {Detector + Apertures} system, compared to the coordinates of its LOS - -The smaller the area delimited by the contour, the better the LOS approximation. We can clearly see here that the difference is significant. The approximation could nevertheless still be valid if the tomogram of the observed emissivity field were constant over this area (which is not the case in most standard situations).
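For reference, the quantity to which the full 3D computation is usually compared is the LOS-approximated signal, i.e. the etendue multiplied by the line integral of the emissivity between the entry and exit points of the LOS. A minimal sketch is given below (the emissivity function Emiss, the points PIn and POut and the etendue value are placeholders to be taken from the objects described above; this is not the ToFu implementation itself):

.. code-block:: python

   import numpy as np

   def los_signal(Emiss, PIn, POut, etendue, ds=0.005):
       """LOS-approximated signal: etendue times the line integral of the emissivity.

       Emiss     : callable taking a (3,N) array of cartesian points, returning (N,) values
       PIn, POut : (3,) entry and exit points of the LOS in the vacuum vessel
       etendue   : etendue of the {Detector + Apertures} system (assumed constant)
       """
       L = np.linalg.norm(POut - PIn)
       k = np.linspace(0., L, max(int(L/ds), 2))
       u = (POut - PIn) / L
       Pts = PIn[:, None] + k[None, :]*u[:, None]   # (3,n) sampling points along the LOS
       return etendue * np.trapz(Emiss(Pts), k)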
- -More than visualisation or computation of the etendue, knowing the two projected polygons of the viewing cone is helpful for faster integration of the signal in a synthetic diagnostic approach. Indeed, we know that all points which are not in both projected polygons are necessarily outside of the viewing cone. Hence, they can be used for fast discrimination of points which are useless for the signal. - -Hence, the total incoming power on the detector for a given spectrally-integrated 3D emissivity field (provided as an input function of the position as a (3,1) numpy array) can be computed. As for the computation of the etendue, you can choose between three integration methods (via the "Mode" kwdarg), among which two discretisation methods and an adaptive algorithm (computation may be very long for high-resolution discretisation with Colis=True). The following example shows a simple gaussian profile, constant in the toroidal direction: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 201-214,216-219 - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 220 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_Detect_SynthDiag.png - :height: 500px - :width: 400 px - :scale: 100 % - :alt: Fake (double gaussian) SXR emissivity field (toroidally invariant) which resembles one of the typical cases of ASDEX Upgrade - :align: center - - Fake (double gaussian) SXR emissivity field (toroidally invariant) which resembles one of the typical cases of ASDEX Upgrade - -As one can expect, the signal is higher when collisions with the Tor boundary are not considered, because of the contribution from the plasma volume which should be hidden behind the central solenoid. - -This direct approach is the most accurate (provided sufficient discretisation of the integral) since it does not rely on a generic pre-defined spatial discretisation of the 3D emissivity on a mesh. -Such a discretisation is nonetheless necessary for tomographic inversions, and it allows for much faster synthetic diagnostic computation since the input emissivity function can be projected onto so-called 'basis functions' with pre-computed contributions (via the so-called geometry matrix) to each detector. Spatial discretisation is addressed in the **ToFu_Mesh** module and the computation of the geometry matrix (both with a 3D and a LOS approach) is addressed in the **ToFu_MatComp** module. - -But first, let us describe the last object class of **ToFu_Geom**, which is the Detect equivalent of the GLOS object class. - - -The GDetect object class ------------------------- - -The GDetect object class provides an easy way to handle groups of detectors which have some features in common, like the GLOS object class does for LOS objects. It is basically a list of Detect objects with a common name and adapted methods for easily computing and plotting the characteristics of all the detectors it contains with a single-line command. It also comes with selection methods to extract a sub-set of its Detect objects. - ..
list-table:: The attributes of a GDetect object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.LDetect, self.nDetect - - A list of Detect objects, which should have the same Tor object, and the number of Detect objects it contains - * - self.BaryP, self.BaryS, self.S, self.nIn - - The barycenter of self.Poly and its center of mass, its surface and the normalised vector perpendicular to the plane of self.Poly and oriented towards the interior of the Tor volume - * - self.Tor - - The Tor object associated to the Detect object - * - self.LApert - - A list of Apert objects associated to the Detect object - -Naturally, the methods are similar to both the GLOS and Detect object classes. -In the following, the GDetect object class is illustrated with the geometry of the F camera of the SXR diagnostic of ASDEX Upgrade. -Once it is loaded as a **ToFu_Geom** GDetect instance, we can use the built-in methods to explore its characteristics, like the etendue of each detector it is comprised of: - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 237-242,244 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_Etend.png - :height: 400 px - :width: 800 px - :scale: 100 % - :alt: Etendues of the detectors composing the F camera of ASDEX Upgrade - :align: center - - Etendues of the detectors composing the F camera of ASDEX Upgrade, computed using the usual 0-order approximation in both ways (direct and reverse), and using a complete integration with an adaptive algorithm (with relative error < 0.01 %) and a sampling algorithm. - -We can also visualise the lines of sight and projected viewing cones of all the detectors. In the following example, we use the 'Elt'-type kwdargs to specify that we first want to plot the viewing cone and the polygon constituting the detectors ('CP'), with the polygons of the apertures ('P') and the reference polygon of the Tor ('P'), but no LOS (''). Then we plot the LOS ('L') but not the viewing cones. - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 248-249,255 - - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProjC.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of the geometry of F, with the viewing cones - :align: center - - Poloidal and toroidal projections of the geometry of F, with the viewing cones - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProjL.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of the geometry of F, with the LOS - :align: center - - Poloidal and toroidal projections of the geometry of F, with the LOS - -We can also select one particular detector and plot it alone. To do this we can use the dedicated routine which returns the index of a detector recognizable by one of its ID attributes (its name, its signal code, its savename or any ID attribute that you have previously passed). - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 250-251,255 - -..
figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_AllProj_F019.png - :height: 400px - :width: 800 px - :scale: 100 % - :alt: Poloidal and toroidal projections of the geometry of one particular detector of F - :align: center - - Poloidal and toroidal projections of the geometry of one particular detector of F - -It is also interesting to plot the LOS and viewing cones in projection space, to see what a realistic diagnostic looks like in this representation and how far we are from a pure LOS (specifying we want the LOS 'L', the viewing cone 'C' and the Tor envelope 'T'): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 258,260 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_Impact.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Representation in projection space of both the LOS and the viewing cones of F, with the Tor envelope - :align: center - - Representation in projection space of both the LOS and the viewing cones of F, with the Tor envelope. One detector spans from theta values close to pi to values close to 0, which explains the boundaries of its associated cone stretching from one end of the graph to the other (in reality, it should be separated into two polygons on this graph). - -We can see that the surfaces corresponding to the viewing cones are reasonably small (and quite elongated), which is an indication that the LOS approximation seems a reasonable hypothesis from a purely geometrical point of view, but of course, in practice it also depends on the nature / shape of the observed emissivity field. - -When it comes to computing the signal of each detector associated to an arbitrary input emissivity field, one must keep in mind that while the LOS approximation allows for fast but approximate computation, a full-3D approach gives an accurate result, but is much slower. While a fraction of a second is sufficient for a LOS computation, several minutes can be necessary for each detector for a full 3D computation. Of course, it depends on the volume which is inside the viewing cone and on the level of accuracy to be obtained. The method used is simple sampling in cartesian coordinates of the viewing cone. The default is a uniform grid of 5mmx5mmx5mm, which appears sufficient for most standard cases. But the user can choose their own grid size using the 'dX12' kwdarg (=[0.005,0.005] by default, in the plane perpendicular to the LOS) and 'ds' (=0.005 by default, along the LOS). -Since the user may often need to evaluate the signal not only once but several times for each detector (for example to plot the time evolution of the signal), it is possible to store a pre-computed grid (the solid angle, which is the longest quantity to compute, is pre-computed) and use it for all the successive computations (the pre-computed solid angle is then simply multiplied by the local value of the input emissivity and integration is performed by summation and multiplication by the elementary volume). - -An example is given below, where three input emissivity fields are provided. The first one is toroidally constant, the local maximum of the second one rotates as if it were a hot spot on the q=1 surface, and the last one is toroidally constant but has anisotropic radiation (it radiates 100 times more in the toroidal direction). - -..
literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 200-207, 262-275, 277-289 - -Since we know we are going to use the same grid several times, we pre-compute it (using the default parametrisation); the pre-computed matrix is then automatically assigned as a new attribute of each Detect object (this may take 2-5 min for each detector; ToDo: implement a full C version of the bottleneck routines for faster computation): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 302-303 - -And then we compute the LOS and 3D signals, specifying that we want to use the pre-computed grid for faster computation (now the computation should take less than a second for each detector): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 307-309,311 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig1.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: The integrated signals of camera F with a toroidally constant input emissivity (both with a LOS and 3D approach) - :align: center - - The integrated signals of camera F with a toroidally constant input emissivity (both with a LOS and 3D approach) - -It can be seen that even for a toroidally constant emissivity, there are some small differences between the pure LOS integration and the full 3D computation (of the order of 1-2 % for the most central LOS, and up to 10 % near the edge). In order to check that these differences are real and are not due to discretization errors or a bad implementation of the 3D integrating algorithm, we can do the following: we provide as an input an emissivity field that only varies with Z. Indeed, the F camera is mostly looking upward, hence, if the emissivity field only changes with Z, the validity of the LOS approximation should be very good and the difference between the LOS and 3D integrations should be minimal since the emissivity is indeed quasi-constant on planes perpendicular to the LOS. - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 293-299 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_SigZ.png - :height: 400 px - :width: 600 px - :scale: 100 % - :alt: The integrated signals of camera F with a horizontally constant input emissivity (both with a LOS and 3D approach) - :align: center - - The integrated signals of camera F with a horizontally constant input emissivity (both with a LOS and 3D approach) - -We can indeed see that the agreement is particularly good for the most central LOS (which are the most vertical, hence the LOS approximation holds best for them), and less good for the edge LOS, which are more and more inclined with respect to the iso-emissivity surfaces. - -We can now try to do the same for the second input emissivity (with "m=1-like" perturbation): - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 323-327 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig2.png - :height: 400 px - :width: 600 px - :scale: 100 % - :alt: The integrated signals of camera F with a toroidally varying input emissivity (both with a LOS and 3D approach) - :align: center - - The integrated signals of camera F with a toroidally varying input emissivity (both with a LOS and 3D approach) - -We observe that the change with respect to the toroidally constant emissivity is not dramatic, which can be explained by the averaging effect of the cone of sight.
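For readers who do not have the tutorial script at hand, the following is a minimal standalone sketch of what the second (rotating hot spot) and third (anisotropic) input emissivity functions can look like. The shapes, numbers and function names are illustrative assumptions, not the actual tutorial code; only the calling convention (a (3,N) numpy array of cartesian points) follows the description given earlier, and the extra 'Vect' argument for the anisotropic case is also an assumption.

.. code-block:: python

    import numpy as np

    # Toy stand-ins for the second and third input emissivity fields discussed
    # above (shapes, numbers and names are illustrative assumptions only).
    # Points is a (3,N) array of cartesian (X,Y,Z) coordinates.

    def Emiss_rotating(Points, R0=1.65, r1=0.25, w=0.05):
        # Hot spot of width w sitting at minor radius r1, whose poloidal position
        # follows the toroidal angle (a crude "m=1-like" rotating perturbation)
        X, Y, Z = Points
        R, theta = np.hypot(X, Y), np.arctan2(Y, X)
        dR = R - (R0 + r1*np.cos(theta))
        dZ = Z - r1*np.sin(theta)
        return np.exp(-(dR**2 + dZ**2)/(2.*w**2))

    def Emiss_aniso(Points, Vect, R0=1.65, w=0.30):
        # Isotropic gaussian profile, enhanced by a factor of up to ~100 when the
        # (assumed) observation direction Vect, a (3,N) array, is aligned with
        # the local toroidal direction
        X, Y, Z = Points
        R = np.hypot(X, Y)
        iso = np.exp(-((R - R0)**2 + Z**2)/(2.*w**2))
        ephi = np.array([-Y/R, X/R, np.zeros_like(R)])  # unit toroidal vector at each point
        cos2 = np.sum(Vect*ephi, axis=0)**2
        return iso * (1. + 99.*cos2)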
- -And finally for the anisotropic emissivity, note that in this case we have to specify to the method that the emissivity is anisotropic. - -.. literalinclude:: ../../src/Tutorial_ToFu_Geom.py - :language: python - :lines: 332-336 - -.. figure:: figures_doc/Fig_Tutor_ToFuGeom_GDetect_Sig3.png - :height: 400 px - :width: 600 px - :scale: 100 % - :alt: The integrated signals of camera F with an anisotropic input emissivity (both with a LOS and 3D approach) - :align: center - - The integrated signals of camera F with an anisotropic input emissivity (both with a LOS and 3D approach) - -Unsurprisingly, this case displays the most dramatic differences between the LOS approach, intrinsically limited, and the 3D computation. The observed differences range from a few percents (very small anisotropy) to several orders of magnitude (purely forward-domiated radiation). The next challenge is to determine whether we will able to reconstruct such anisotropies in an inverse-problem approach, which will be addressed in **ToFu_MatComp** and **ToFu_Inv**. - -.. Local Variables: -.. mode: rst -.. End: diff --git a/_Old_doc/build/html/_sources/ToFu_Inv.txt b/_Old_doc/build/html/_sources/ToFu_Inv.txt deleted file mode 100644 index 1e0cf5d11..000000000 --- a/_Old_doc/build/html/_sources/ToFu_Inv.txt +++ /dev/null @@ -1,77 +0,0 @@ -.. role:: envvar(literal) -.. role:: command(literal) -.. role:: file(literal) -.. role:: ref(title-reference) - - -ToFu_Inv -======== - -(This project is not finalised yet, work in progress...) - -**ToFu_Mesh**, is a ToFu module aimed at handling spatial discretisation of a 3D scalar field in a vacuum chamber (typically the isotropic emissivity of a plasma). Such discretisation is done using B-splines of any order relying on a user-defined rectangular mesh (possibily with variable grid size). It is particularly useful for tomographic inversions and fast synthetic diagnostics. - -It is designed to be used jointly with the other **ToFu** modules, in particular with **ToFu_Geom** and **ToFu_MatComp**. It is a ToFu-specific discretisation library which remains quite simple and straightforward. However, its capacities are limited to rectangular mesh and it may ultimately be percieved as a much less powerful version of **PIGASUS/CAID**. Users who wish to use **ToFu** only for tomographic inversions may find **ToFu_Mesh** sufficient for thir needs, others, who wish to use a synthetic diagnostic approach, and/or to use **ToFu_Mesh** jointly with plasma physics codes (MHD...) may prefer using **PIGASUS\CAID** for spatial discreatisation. - -Hence, **ToFu_Mesh** mainly provides two object classes : one representing the mesh, and the other one (which uses the latter) representing the basis functions used for discretisation: - -.. list-table:: The object classes in **ToFu_Geom** - :widths: 10 30 20 - :header-rows: 1 - - * - Name - - Description - - Inputs needed - * - ID - - An identity object that is used by all **ToFu** objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...) - - By default only a name (a character string) is necessary, A default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attribute can be added to suit the specific need of the the data acquisition system of each fusion experiment or the naming conventions of each laboratory. 
- * - Mesh1D, Mesh2D, Mesh3D - - 1D, 2D and 3D mesh objects, storing the knots and centers, as well as the correspondence between knots and centers in both ways. The higher dimension mesh objects are defined using lower dimension mesh objects. The Mesh 2D object includes an enveloppe polygon. They all include plotting methods and methods to select a subset of the total mesh. The Mesh 3D object is not finished. - - A numpy array of knots, which can be defined using some of the functions detailed below (for easy creation of linearly spaced knots with chosen resolution). - * - BaseFun1D, BaseFunc2D, BaseFunc3D - - 1D, 2D and 3D families of B-splines, relying on Mesh1D, Mesh2D, Mesh3D objects, with chosen degree and multiplicity for each dimension. Includes methods for plotting, for determining the support and knots and centers associated to each basis function, as well as for computing 1st, 2nd or 3rd order derivatives (as functions), and local value (summation of all basis functions or their derivatives at a given point and for given weights). Includes methods for computing integrals of derivative operators... - - A Mesh object of the adapted dimension, and a degree value. - - -The following will give a more detailed description of each object and its attributes and methods through a tutorial at the end of which you should be able to create your own mesh and basis functions and access its main characteristics. - -Getting started with **ToFu_Mesh** ----------------------------------- -Once you have downloaded the whole **ToFu** package (and made sur you also have scipy, numpy and matplotlib, as well as a free polygon-handling library called Polygon which can be downloaded at ), just start a python interpreter and import **ToFu_Geom** (we will always import **ToFu** modules 'as' a short name to keep track of the functionalities of each module). To handle the local path of your computer, we will also import the small module called **ToFu_PathFile**, and **matplotlib** and **numpy** will also be useful: - -.. literalinclude:: ../../src/Tutorial_example.py - :language: python - :lines: 7-12 - -The os module is used for exploring directories and the cPickle module for saving and loading objects. - -The Tor object class --------------------- - -To define the volume of the vacuum chamber, you need to know the (R,Z) coordinates of its reference polygon (in a poloidal cross-section). You should provide it as a (2,N) numpy array where N is the number of points defining the polygon. To give the Tor object its own identity you should at least choose a name (i.e.: a character string). For more elaborate identification, you can define an ID object and give as an input instead of a simple name. You can also provide the position of a "center" of the poloidal cross-section (in 2D (R,Z) coordinates as a (2,1) numpy array) that will be used to compute the coordinates in transformation space any LOS using this Tor object (and the sinogram of any scalar emissivity field using this Tor object). If not provided, the center of mass of the reference polygon is used as a default "center". - -In the following, we will use the geometry of ASDEX Upgrade as a example. -We first have to give a reference polygon ('PolyRef' below) as a (2,N) numpy array in (R,Z) coordinates. - -.. 
literalinclude:: ../../src/Tutorial_example.py - :language: python - :lines: 21-26 - - -Alternatively, you can store PolyRef in a file and save this file locally, or use one of the default tokamak geometry stored on the **ToFu** database where Tor input polygons are stored in 2 lines .txt files (space-separated values of the R coordinates on the first line, and corresponding Z coordinates on the second line). Here, we use the default ASDEX Upgrade reference polygon stored in AUG_Tor.txt. - -.. literalinclude:: ../../src/Tutorial_example.py - :language: python - :lines: 29-33 - -We now have created two Tor objects, and **ToFu_Geom** has computed a series of geometrical characteristics that will be useful later (or that simply provide general information). -TO BE FINISHED !!!!!!!!!!!!!!! - - -.. math:: - - \nabla^2 u = \sin(x) - -.. Local Variables: -.. mode: rst -.. End: diff --git a/_Old_doc/build/html/_sources/ToFu_MatComp.txt b/_Old_doc/build/html/_sources/ToFu_MatComp.txt deleted file mode 100644 index 31b2c3983..000000000 --- a/_Old_doc/build/html/_sources/ToFu_MatComp.txt +++ /dev/null @@ -1,268 +0,0 @@ -.. role:: envvar(literal) -.. role:: command(literal) -.. role:: file(literal) -.. role:: ref(title-reference) - - -**ToFu_MatComp** -================ - -(This project is not finalised yet, work in progress...) - -**ToFu_MatComp**, is a ToFu module aimed at computing the geometry matrix associated to a diagnostic geometry from **ToFu_Geom** and a set of basis functions from **ToFu_Mesh**. From the first, it requires either a GDetect object or a GLOS object (keep in mind that a GDetect object automatically includes its associated GLOS object), or more simply a list of Detect objects or LOS objects (in case you don't want to define their group equivalent for any particular reason). From **ToFu_Mesh**, it requires a BaseFunc2D or BaseFunc3D object. - -The outupt (i.e.: the computed geometry matrix) can be retrieved directly as a numpy array, or as a **ToFu_MatComp** object, which includes the array as an attribute and also provides useful methods to quickly explore its main characteristics, as illustrated in the following. - -Hence, **ToFu_MatComp** provides the following object classes : - -.. list-table:: The object classes in **ToFu_Geom** - :widths: 10 30 20 - :header-rows: 1 - - * - Name - - Description - - Inputs needed - * - GMat2D - - A geometry matrix computed from a BaseFunc2D object (i.e.: a 2D set of basis functions, assuming the toroidal angle is an ignorable coordinate). - - A GDetect object or a list of Detect objects, and a BaseFunc2D object. - * - GMat3D - - A geometry matrix computed from a BaseFunc3D object (i.e.: a 3D set of basis functions, not implemented yet...). - - To do... - - -The following will give a more detailed description of each object and its attributes and methods through a tutorial at the end of which you should be able to compute your own geometry matrix and access its main characteristics. - -Getting started with ToFu_MatComp ---------------------------------- - -To use **ToFu_MatComp**, you first need to import it as well as **ToFu_PathFile**. Of course, **matplotlib** and **numpy** will also be useful. - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 7-15 - -The os module is used for exploring directories and the cPickle module for saving and loading objects. 
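As a reminder of how such previously saved objects are typically reloaded (a minimal sketch only: the file names and paths below are hypothetical, the actual code is the one included from Tutorial_ToFu_MatComp.py):

.. code-block:: python

    import cPickle

    # Hypothetical file names -- adapt them to wherever the BaseFunc2D and GDetect
    # objects were saved at the end of the ToFu_Mesh and ToFu_Geom tutorials
    with open('./Objects/BF2_AUG.pck', 'rb') as f:
        BF2 = cPickle.load(f)
    with open('./Objects/GD_F_AUG.pck', 'rb') as f:
        GD = cPickle.load(f)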
-We first need to load a BaseFunc2D object (created using **ToFu_Mesh** and saved), as well as a **GDetect** object (created with **ToFu_Geom** and saved): - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 20-23 - -In the following, we will illustrate the capacities of **ToFu_MatComp** with the F camera of the SXR diagnostic of ASDEX Upgrade and a relatively coarse 2D mesh with a resolution of around 2 cm in the central region and around 6 cm near the edge, on which degree 0 bivariate B-splines have been imposed (we will illustrate later the use of 1st and 2nd order bivariate B-splines). - -Now we simply need to build the associated geometry matrix. Computation will be done in two steps: first, an index matrix is computed (a boolean numpy array) that indicates for each detector which mesh elements it can see (by checking whether they are in its projected viewing cone); this first step typically takes 1-5 min for each detector and makes the second step much faster. The second step consists in properly computing the integrated contribution of each basis function for each detector. This is obviously longer and typically takes 2-6 min per detector (instead of at least 10 times more without the first step). -Finally, both for comparison purposes and for those users who want to use a pure LOS approach, another geometry matrix is computed with a pure LOS approximation, which is obviously much faster and typically takes 0.01-1 s per detector (as always, it depends on the mesh resolution and basis function degree). - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 26-28 - -Now that we have a proper GMat2D object, let us use its built-in methods to explore its properties. - -First of all, we can plot the total contribution (from all the basis functions) to each detector simply by plotting the sum of the geometry matrix, and comparing it to the sum of the LOS-approximated geometry matrix. You can do this manually or use the dedicated built-in method, which also shows the sum in the other dimension (i.e.: the total contribution of each basis function to all detectors): - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 31,33 - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM0_Sum.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 0th order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - :align: center - - Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 0th order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - - -We can see that there seems to be little difference between the full 3D and the LOS-approximated matrices, but let us go a little further into the details by visualising the values of the geometry matrix for a particular chosen detector, and comparing them to their LOS-approximated equivalent: - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 35-36,38 - -..
figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM0_Detect.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each 0th order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - :align: center - - Total contribution of each 0th order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - -Similarly, we can go the other way around and visualise the values of the geometry matrix for any chosen basis function (and thus see how it contributes to various detectors): - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 40,42 - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM0_BF.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of a particular 0th order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ognored by the LOS approximation, while in reality it is seen by two detectors. - :align: center - - Total contribution of a particular 0th order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ognored by the LOS approximation, while in reality it is seen by two detectors. - -We can see significant differences when we consider the details of a specific line (or column) of the geometry matrix, which is important because it provides the set of equations that link the basis functions to the measurements. If you want to perform an inversion, you should pay particular attention to this set of equations as the tomography problem hinges on Fredholm integral equations of the first kind, making it an ill-posed problem particularly sensitive to errors both in the measurements and in the equations. - -In summary, Despite similar sum (i.e. total contribution to each detector), we observe that with the LOS approximation the number of pixels that contribute to the signal is smaller but that their contribution is generally over-estimated as compared to the full 3D computation. If we consider each line of the geometry matrix, this line represents the equation associated to a particular detector measurement f_i: - -.. math:: - f_i = M_{i,1}b_1 + M_{i,2}b_2 + ... + M_{i,N}b_N - -Our observation then means that both computations give the same sum of terms on the right hand side, but that the LOS approximation tends to give higher values but for a fewer number of terms, thus affecting the spread of the weights on the different terms. This is an important limitation of the LOS approximation when it used to compute a geometry matrix using pixels as basis functions. - -Now let us consider the same matrix but computed with 1st and 2nd order bivariate B-splines: - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 46,50-51,55,57 - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_Sum.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 1st order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - :align: center - - Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 1st order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - -.. 
figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_Detect.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each 1st order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - :align: center - - Total contribution of each 1st order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_BF.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of a particular 1st order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ognored by the LOS approximation, while in reality it is seen by two detectors. - :align: center - - Total contribution of a particular 1st order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ignored by the LOS approximation, while in reality it is seen by two detectors. - - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 59,63-64,68,70 - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_Sum.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 2nd order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - :align: center - - Total contribution of each basis function (top) and total contribution to each detector (bottom) for a 2nd order set of B-splines and the F camera of ASDEX Upgrade, with both LOS and 3D computations - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_Detect.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of each 2nd order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - :align: center - - Total contribution of each 2nd order basis function to detector F_016 of ASDEX Upgrade, decomposed on mesh elements (top) and basis functions (bottom), with both LOS and 3D computations - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_BF.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Total contribution of a particular 2nd order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ognored by the LOS approximation, while in reality it is seen by two detectors. - :align: center - - Total contribution of a particular 2nd order basis function to each detector of camera F of ASDEX Upgrade, with both LOS and 3D computations, the chosen pixel is ignored by the LOS approximation, while in reality it is seen by two detectors. - -We see that the overlapping of higher-order basis functions ensures a more balanced distribution of the weights computed with a LOS approximation. This, and the fact that the basis functions are more regular, makes higher order basis functions a valuable improvement for tomographic inversions using a geometry matrix computed with a LOS approximatiopn. Obviously a full 3D computation remains even more accurate. 
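To make the argument about the spread of the weights more concrete, here is a small self-contained numpy illustration (toy numbers, not taken from the matrices above): two rows with the same sum, one with weights concentrated on a few basis functions (LOS-like, with pixels) and one with weights spread over more basis functions (3D-like), give the same signal for a flat emissivity but different signals as soon as the coefficients vary across the viewing cone.

.. code-block:: python

    import numpy as np

    # Toy illustration (numbers invented for this example): two rows of a geometry
    # matrix with identical sums, one LOS-like (few, large weights) and one 3D-like
    # (weights spread over more basis functions)
    M_los = np.array([0.00, 0.00, 0.50, 0.50, 0.00, 0.00])
    M_3d  = np.array([0.05, 0.15, 0.30, 0.30, 0.15, 0.05])

    b_flat   = np.ones(6)                             # constant emissivity coefficients
    b_peaked = np.array([0., 0.2, 1., 1., 0.2, 0.])   # coefficients varying across the cone

    print M_los.dot(b_flat), M_3d.dot(b_flat)         # same total contribution: 1.0 and 1.0
    print M_los.dot(b_peaked), M_3d.dot(b_peaked)     # but different signals: 1.0 vs 0.66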
- -Now that the geometry matrix is computed (with whatever method or basis functions), it can be used in two ways: either as the set of equations necessary for solving the tomographic inversions (see **ToFu_Inv**), or as a pre-computed intermediate for forward-modelling or a synthetic diagnostic (i.e.: reconstructing the measurements assuming an input emissivity field). This method only requires that the chosen basis functions are relevant for the input emissivity (i.e.: don't use a GMat2D object if the emissivity is not toroidally constant, or if the emissivity is anisotropic). Once you are sure that you have a relevant set of basis functions with their associated geometry matrix, just fit the basis functions to the input emissivity (this will give you the coefficients of each basis function) and use the geometry matrix to get the associated measurements, as illustrated below: - -.. literalinclude:: ../../src/Tutorial_ToFu_MatComp.py - :language: python - :lines: 74-87,89,91,93 - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM0_Sig.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Synthetic diagnostic using decomposition of an input emissivity on a set of 0th order B-splines, geometry matrix computed with both 3D and LOS approach - :align: center - - Synthetic diagnostic using decomposition of an input emissivity on a set of 0th order B-splines, geometry matrix computed with both 3D and LOS approach - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM1_Sig.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Synthetic diagnostic using decomposition of an input emissivity on a set of 1st order B-splines, geometry matrix computed with both 3D and LOS approach - :align: center - - Synthetic diagnostic using decomposition of an input emissivity on a set of 1st order B-splines, geometry matrix computed with both 3D and LOS approach - -.. figure:: figures_doc/Fig_Tutor_ToFuMatComp_GM2_Sig.png - :height: 400px - :width: 600 px - :scale: 100 % - :alt: Synthetic diagnostic using decomposition of an input emissivity on a set of 2nd order B-splines, geometry matrix computed with both 3D and LOS approach - :align: center - - Synthetic diagnostic using decomposition of an input emissivity on a set of 2nd order B-splines, geometry matrix computed with both 3D and LOS approach - -This method is faster than the direct, brute-force computation introduced in **ToFu_Geom**, but is limited by the relevance of the basis functions with respect to the input emissivity. -We can see that the LOS approximation generally gives better results (in a synthetic diagnostic approach) when used with higher-order basis functions (as explained earlier). The difference is visible between 0th and 1st order basis functions (but not so much between 1st and 2nd order basis functions). - -Furthermore, another general tendency appears: the LOS approximation tends to underestimate the signal for the lines on the High Field Side (HFS) and to overestimate it for the LOS on the Low Field Side (LFS), with respect to the region of maximum emissivity. This is consistent with the fact that the toroidicity induces a general shift towards the LFS. Hence the geometrically optimal LOS (from the center of mass of the detector to the center of mass of the intersection of all its apertures) is optimal in cartesian coordinates but not in cylindrical coordinates. A different LOS (chosen taking into account the toroidicity, for example by computing the center of mass of the viewing cone in (R,theta) coordinates) would probably help solve this issue and would allow you to use a pure LOS approximation with better validity (to do in ToFu_Geom...).
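The direction of this bias can be illustrated with a two-line estimate (toy numbers only): for a slice of viewing cone spanning R in [1.2, 2.0] m uniformly, the R-weighted mean radius (which is what the toroidal geometry effectively weights by) lies further towards the LFS than the simple midpoint used by a purely cartesian LOS.

.. code-block:: python

    import numpy as np

    # Toy estimate of the LFS shift induced by toroidicity (illustrative numbers)
    R1, R2 = 1.2, 2.0                       # radial extent of a viewing cone slice [m]
    R = np.linspace(R1, R2, 1000)

    R_mid      = 0.5*(R1 + R2)              # midpoint used by a purely cartesian LOS
    R_weighted = np.average(R, weights=R)   # R-weighted mean, accounting for toroidicity

    print R_mid, R_weighted                 # 1.6 vs ~1.63: shifted towards the LFS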
- -Again, the above numerical results are just helpful to understand what's going on, but keep in mind that the degree of accuracy of the LOS approximation not only depends on the geometry, but also on the input emissivity that you are using (i.e.: large gradients / curvature, toroidal changes, anisotropy, localised particular features...). - - -Limits to the LOS approximation for the geometry matrix computation -------------------------------------------------------------------- - -As we saw, the fact that the final solution is probably compatible with the LOS approximation (i.e. it has 'reasonable' curvature across most viewing cones) does not mean that the LOS approximation is valid with the basis functions used to compute it. Indeed, the LOS approximation can basically be written as follows: - -.. math:: - f_i = \int_{LOS}\iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2Sdl = E_i\int_{LOS}\left<\epsilon^{\eta}\right>dl\\ - -where E_i is the etendue of detector i and: - -.. math:: - \begin{array}{l} - \left<\epsilon^{\eta}\right> = \frac{1}{E_i} \iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2S\\ - E_i = \iint_S\int_{4\pi}\delta_id^2\Omega d^2S - \end{array} - -The corresponding LOS-approximated signal would be: - -.. math:: - \hat{f_i} = E_i\int_{LOS}\epsilon^{\eta}dl - -Hence, the LOS approximation is valid if we can safely assume that the local value of the emissivity on the LOS is a good approximation of its value averaged on the surface perpendicular to the LOS: - -.. math:: - \hat{f_i} \approx f_i \Leftrightarrow \int_{LOS}\left<\epsilon^{\eta}\right>dl \approx \int_{LOS}\epsilon^{\eta}dl - -Which can be fulfilled if (but not exclusively if): - -.. math:: - \forall \ l \ \in \ LOS \ , \ \epsilon^{\eta}\left(l\right) \approx \frac{1}{E_i} \iint_S\int_{4\pi}\epsilon^{\eta}\delta_id^2\Omega d^2S\left(l\right) = \left<\epsilon^{\eta}\right>\left(l\right) - -When applied to a physical (i.e. real) emissivity field, this assumption may hold because the emissivity fields usually considered vary sufficiently slowly in the direction perpendicular to the LOS (within the limits of the viewing cone). However, when computing the geometry matrix, this equation is not applied to a 'physical' emissivity field, but to individual basis functions. In particular, if the nature of these basis functions allows for steep variations across the LOS within the limits of the viewing cone, then the LOS approximation may need to be questioned. - -A typical case is pixels (i.e.: 0th order bivariate B-splines), particularly when they have a size too small compared to the local beam width (ref : my thesis + ingesson). In such cases, the above integral, taken for a single pixel (which is what is evaluated in the geometry matrix), can in no way be approximated by the value in the pixel (i.e.: on the LOS). - -This is a very common mistake: even if the LOS approximation is valid for the final solution, it does not mean it is valid for the basis functions that you are using !
And, ironicaly, using it for computing the geometry matrix anyway will lead to a final solution that will be less regular than it should be because the LOS approximation tends to overestimate the contribution of some pixels and underestimate the contribution of others. Paradoxically, not using the LOS approximation for the geometry matrix is both more physical and leads to solutions which are more likely to be compatible with this LOS approximation ! Again, this depends on the basis functions you are using (nature and size). - -If you still want to use pixels and the (pure) LOS approximation together, a rule of thumb to limit the bias is to use pixels of a size comparable to the beam width in the region where the signal is maximal (but there will still be situations in which the approximation will not hold, see : my thesis). Another quite common solution is to make your own 'homemade' routine to compensate for the beam width (for example with anti-aliasing, with adaptative LOS or with hybrid 1D-2D-3D solutions). Most people have their own tricks to compensate in a way or another. - -Other solutions are either to use the LOS approximation but with different basis functions such as 1st or 2nd order bivariate B-splines (both because their support overlap and because they are more regular), or 'simply' not to use the LOS approximation (but an accurate full 3D computation requires a lot of painful work). Since both these solutions are fully implemented in **ToFu**, you can start rejoicing and using it :-) - -.. Local Variables: -.. mode: rst -.. End: diff --git a/_Old_doc/build/html/_sources/ToFu_Mesh.txt b/_Old_doc/build/html/_sources/ToFu_Mesh.txt deleted file mode 100644 index 09b8d8fc0..000000000 --- a/_Old_doc/build/html/_sources/ToFu_Mesh.txt +++ /dev/null @@ -1,428 +0,0 @@ -.. role:: envvar(literal) -.. role:: command(literal) -.. role:: file(literal) -.. role:: ref(title-reference) - - -**ToFu_Mesh** -============= - -(This project is not finalised yet, work in progress...) - -**ToFu_Mesh**, is a ToFu module aimed at handling spatial discretisation of a 3D scalar field in a vacuum chamber (typically the isotropic emissivity of a plasma). Such discretisation is done using B-splines of any order relying on a user-defined rectangular mesh (possibily with variable grid size). It is particularly useful for tomographic inversions and fast synthetic diagnostics. - -It is designed to be used jointly with the other **ToFu** modules, in particular with **ToFu_Geom** and **ToFu_MatComp**. It is a ToFu-specific discretisation library which remains quite simple and straightforward. However, its capacities are limited to rectangular mesh and it may ultimately be percieved as a much less powerful version of **PIGASUS/CAID**. Users who wish to use **ToFu** only for tomographic inversions may find **ToFu_Mesh** sufficient for thir needs, others, who wish to use a synthetic diagnostic approach, and/or to use **ToFu_Mesh** jointly with plasma physics codes (MHD...) may prefer using **PIGASUS\CAID** for spatial discreatisation. - -Hence, **ToFu_Mesh** mainly provides two object classes : one representing the mesh, and the other one (which uses the latter) representing the basis functions used for discretisation: - -.. 
list-table:: The object classes in **ToFu_Geom** - :widths: 10 30 20 - :header-rows: 1 - - * - Name - - Description - - Inputs needed - * - ID - - An identity object that is used by all **ToFu** objects to store specific identity information (name, file name if the object is to be saved, names of other objects necessary for the object creation, date of creation, signal name, signal group, version...) - - By default only a name (a character string) is necessary, A default file name is constructed (including the object class and date of creation), but every attribute can be modified and extra attribute can be added to suit the specific need of the the data acquisition system of each fusion experiment or the naming conventions of each laboratory. - * - Mesh1D, Mesh2D, Mesh3D - - 1D, 2D and 3D mesh objects, storing the knots and centers, as well as the correspondence between knots and centers in both ways. The higher dimension mesh objects are defined using lower dimension mesh objects. The Mesh 2D object includes an enveloppe polygon. They all include plotting methods and methods to select a subset of the total mesh. The Mesh 3D object is not finished. - - A numpy array of knots, which can be defined using some of the functions detailed below (for easy creation of linearly spaced knots with chosen resolution). - * - BaseFun1D, BaseFunc2D, BaseFunc3D - - 1D, 2D and 3D families of B-splines, relying on Mesh1D, Mesh2D, Mesh3D objects, with chosen degree and multiplicity for each dimension. Includes methods for plotting, for determining the support and knots and centers associated to each basis function, as well as for computing 1st, 2nd or 3rd order derivatives (as functions), and local value (summation of all basis functions or their derivatives at a given point and for given weights). Includes methods for computing integrals of derivative operators... - - A Mesh object of the adapted dimension, and a degree value. - - -The following will give a more detailed description of each object and its attributes and methods through a tutorial at the end of which you should be able to create your own mesh and basis functions and access its main characteristics. - -Getting started with ToFu_Mesh ------------------------------- - -Once you have downloaded the whole **ToFu** package (and made sur you also have scipy, numpy and matplotlib, as well as a free polygon-handling library called Polygon which can be downloaded at ), just start a python interpreter and import **ToFu_Geom** and **ToFu_Mesh** (we will always import **ToFu** modules 'as' a short name to keep track of the functionalities of each module). To handle the local path of your computer, we will also import the small module called **ToFu_PathFile**, and **matplotlib** and **numpy** will also be useful: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 7-14 - -The os module is used for exploring directories and the cPickle module for saving and loading objects. - -The Mesh1D, Mesh2D and Mesh3D object classes --------------------------------------------- - -In this section, we describe the Mesh objects starting from the unidimensional to the 3D version. - -.. list-table:: The attributes of a Mesh1D object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.NCents, self.NKnots - - The number of mesh elements or centers (resp. 
knots) of the object (typically self.NKnots = self.NCents+1) - * - self.Cents, self.Knots - - The coordinates of the centers and knots themselves, as two numpy arrays - * - self.Lengths, self.Length, self.BaryL, self.BaryP - - The length of each mesh element, the total length of the mesh and the center of mass of the mesh (i.e.: weight by the respective length of each mesh element), and the barycenter of the self.Cents - * - self.Cents_Knotsind, self.Knots_Centsind - - The index arrays used to get the correspondence between each mesh element (resp, each knot) and its associated knots (resp. its associated mesh elements) - -.. list-table:: The attributes of a Mesh2D object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.MeshR, self.MeshZ - - The two Mesh1D objects used to create this Mesh2D object - * - self.NCents, self.NKnots - - The number of mesh elements or centers (resp. knots) of the object (typically self.NKnots = self.NCents+1) - * - self.Cents, self.Knots - - The coordinates of the centers and knots themselves, as two numpy arrays - * - self.Surfs, self.Surf, self.VolAngs, self.VolAng, self.BaryV, self.BaryS, self.BaryL, self.BaryP - - The surface of each mesh element, the total surface of the mesh, the volume per unit angle of each mesh element, the total volume per unit angle, the volume barycenter of the mesh (i.e. taking into account not only the surface repartition but also the toroidal geometry), the center of mass of the mesh (i.e.: weight by the respective surface of each mesh element), the middle point (the average between the extreme (R,Z) coordinates) and the barycenter of all the self.Cents - * - self.Cents_Knotsind, self.Knots_Centsind - - The index arrays used to get the correspondence between each mesh element (resp, each knot) and its associated knots (resp. its associated mesh elements) - * - self.BoundPoly - - The boundary polygon of the mesh, useful for fast estimation whether a point lies inside the mesh support or not. - -In an experiment-oriented perspective, **ToFu_Mesh** comes with simple functions to help you quickly define an optimal 1D grid, with explicit parametrisation of the spatial resolution on regions of interest. -For example, if you want to define a 1D grid with a 5 cm resolution near the first end, that gradually refines to 1 cm at a given point, stays 1 cm for a given length and is then gradually enlarged to 6 cm at the other end, you just have to feed in the points of interest and their associated resolution to the *LinMesh_List* function, as a two lists of corresponding (start,end) tuples. - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 20-22 - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 24-25 - -You can then feed the resulting knots numpy array to the Mesh1D object class and use this object methods to access all the features of interest of the created mesh: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 28-30,33 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_M1.png - :height: 200px - :width: 500 px - :scale: 100 % - :alt: Arbitrary 1D mesh with customized resolution in chosen regions - :align: center - - Arbitrary 1D mesh with customized resolution in chosen regions - -.. 
figure:: figures_doc/Fig_Tutor_ToFuMesh_M1_Res.png - :height: 250px - :width: 500 px - :scale: 100 % - :alt: Local spatial resolution of the created 1D mesh - :align: center - - Local spatial resolution of the created 1D mesh - -It can be seen that the algorithm tried to render a mesh with the required resolution, even though it had to decrease it slightly around the first point, where it is lower than the required 6 cm (this is because the number of mesh elements must be an integer, which leads to rounding). This is also shown by the *Res* variable, which returns the actual resolution. -As for the **ToFu_Geom** plotting routines, the 'Elt' keyword argument provides you with the possibility of choosing what is going to be plotted (the knots 'K', the centers 'C' and/or the numbers 'N'). - -The Mesh2D object class relies on the same basics, except that its multi-dimensional nature means that it has extra methods for easy handling of mesh elements. Let us for example create a coarse 2D mesh using two different 1D mesh objects: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 37-42,44 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_M2_Raw.png - :height: 400px - :width: 400 px - :scale: 100 % - :alt: Arbitrary 2D mesh with customized resolution in chosen regions - :align: center - - Arbitrary 2D mesh with customized resolution in chosen regions - - -The Mesh2D class comes with a method to automatically create another Mesh2D object that can be seen as a sub-mesh (only the elements lying inside an input polygon are kept, the rest being memorized only as 'Background'). In our example, we can use a specific method of the TFG.Tor object class to create a smooth convex polygon lying inside the Tor envelope (see the kwdargs for customization of the smoothing and offset) to concentrate on the region where most SXR radiation comes from: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 47-51,54 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_M2.png - :height: 500px - :width: 400 px - :scale: 100 % - :alt: Submesh of the 2D mesh with customized resolution in chosen regions, with selected elements only (using an input polygon) - :align: center - - Submesh of the 2D mesh with customized resolution in chosen regions, with selected elements only (using an input polygon) - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_M2_Res.png - :height: 500px - :width: 500 px - :scale: 100 % - :alt: Local spatial resolution of the created 2D mesh (both linear and surface) - :align: center - - Local spatial resolution of the created 2D mesh (both linear and surface) - -Here, the 'NLim' kwdarg is used to specify how many corners of a mesh element must lie inside the input polygon for this mesh element to be kept. - -Now, the Mesh2D object class provides tools to easily select and plot chosen elements of the 2D mesh. For example, if you want to get the coordinates of the four knots associated to mesh element number 50, you can use the attribute 'Centers_Knotsind' to get them, and then plot them: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 57-59,63 - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 60-61 - -..
figure:: figures_doc/Fig_Tutor_ToFuMesh_M2_Cents.png - :height: 400px - :width: 400 px - :scale: 100 % - :alt: Selected mesh element and its associated knots - :align: center - - Selected mesh element and its associated Knots - -Similarly, you can get and plot all the mesh element centers associated to knots number 160, 655 and 1000: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 65-68,72 - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 69-70 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_M2_Knots.png - :height: 400px - :width: 400 px - :scale: 100 % - :alt: Selected mesh knots and their associated mesh elements - :align: center - - Selected mesh knots and their associated mesh elements - -The Mesh3D object class is currently being built... to be finished. - -Now that we have access to a mesh, we can build basis functions on it. The basis functions available in **ToFu_Mesh** are all B-splines, as illustrated below. - - -The BaseFunc1D, BaseFunc2D and BaseFunc3D object classes --------------------------------------------------------- - -The use of B-spline allows for more flexibility and more accuracy than the standard pixels (which are B-splines of degree 0). Indeed, most of the tomographic algorithms using series expansion in physical space assess the regularity of the solution by computing the integral of a norm of one of its derivatives. While the use of pixels forces you to use discrete approximations of the derivative operators, the use of B-splines of sufficient degree allows to use an exact formulation of the derivative operators. - -The attributes of a BaseFunc1D objects are the following: - -.. list-table:: The attributes of a BaseFunc1D object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.Mesh - - The Mesh1D object on which the basis functions are built - * - self.LFunc, self.NFunc, self.Deg, self.Bound - - The list of basis functions and the number of basis functions (self.NFunc=len(self.LFunc)), the degree of the basis functions, and the boundary condition (only 0 implemented so far, all points have 1 multiplicity) - * - self.Func_Centsind, self.Func_Knotsind, self.Func_PMax - - An array giving the correspondence index between each basis function and all its associated mesh centers (and there are methods to go the other way around), its associated mesh knots, and the position of the maximum of each basis function (either oa mesh center or a knot depending on its degree). - -Other quantities, indices or functions of interest are not stored as attributes, but instead accessible through methods, as will be illustrated in the following: - -One of the most common issues in SXR tomography on Tokamaks is the boundary constraint that one must enforce at the plasma edge to force the SXR emissivity field to smoothlty decrease to zero in order to avoid artefacts on the tomographic reconstructions. With pixels, this usually has to be done by adding artificial detectors that 'see' the edge pixels only and are associated to a 'measured' value of zero (and the regularisation process does the rest). 
With B-splines of degree 2 for example, this constraint can be built-in the basis functions and enforced without having to add any artificial constraint, provided the underlying mesh is created accordingly, as illustrated in the following example, where BaseFunc1D object of degree 2 is created and a method is used to fit its coefficients to an input gaussian-like function: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 76-80,82 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF1.png - :height: 300px - :width: 500 px - :scale: 100 % - :alt: 1D B-splines of a BaseFunc1D object, with arbitrary coefficients to create a gaussian-like profile - :align: center - - 1D B-splines of a BaseFunc1D object, with arbitrary coefficients to create a gaussian-like profile - -By construction, and because we have only used points with multiplicity equal to one so far, the profile can only decrease smoothly to zero near the edge. - -The BaseFunc1D object also comes with methods to compute and plot local values its derivatives, or of some operators of interest that rely on derivatives. In particular, the following example shows the plots of the first derivative, the second derivative and a quantity called the Fisher Information that is the first derivative squared and divided by the function value. As usual, the 'Elt' kwdarg is used to specify whether we want only the total function ('T') or the detail of the list of all the underlying B-splines ('L', which is not possible for non-linear operators): - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 84-86,88 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF1_Deriv.png - :height: 300px - :width: 500 px - :scale: 100 % - :alt: Some quantities of interest, based on derivative operators, for the chosen BaseFunc1D object - :align: center - - Some quantities of interest, based on derivative operators, for the chosen BaseFunc1D object - -This was done using the 'Deriv' kwdarg, which can take several values, as shown in the table below: - -.. list-table:: The available values of the 'Deriv' keyword argument for a BaseFunc1D object - :widths: 10 40 - :header-rows: 1 - - * - Value - - Description - * - 0, 1, 2, 3 or 'D0', 'D1', 'D2', 'D3' - - Respectively the B-splines themselves (0-th order derivative), the first, second and third order derivative - * - 'D0N2', 'D1N2', 'D2N2', 'D3N2' - - The squared norm of the 0th, 1st, 2nd and 3rd order derivatives - * - 'D1FI' - - The Fisher Information, which is the squared norm of the 1st order derivative, divided by the function value - -Keep in mind that we are only using exact derivatives here, so the current version of **ToFu_Mesh** does not provide discretised operators and you have to make sure that you only compute derivatives for B-splines of sufficiently high degree. - -Finally, the BaseFunc1D object also comes with methods to compute the value of the integral of the previous operators on the support of the B-spline. When it is possible, another method also returns the matrix that can be used to compute this integral using a vector of coefficients for the B-splines, along with a flag 'm' that indicates how the matrix should be used: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 91-94 - -.. 
literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 95-96 - -When m==0, it means that A is a vector (Int=A*Coefs), and when m==1, it means A is matrix and the integral requires a square operation (Int=Coefs*A*Coefs). -The following integrals are implemented: - -.. list-table:: The available values of the 'Deriv' keyword argument for integral computation - :widths: 10 40 - :header-rows: 1 - - * - Value - - Description - * - 0 or 'D0' - - The integrals of the B-splines themselves (0-th order derivative, integrals of higher order derivatives are all zero) - * - 'D0N2', 'D1N2', 'D2N2', 'D3N2' - - The integrals of the squared norm of the 0th, 1st, 2nd and 3rd order derivatives (only 0-th order derivative implemented so far, for Deg=0,1, but not for Deg=2,3) - * - 'D1FI' - - The integrated Fisher Information, not implemented so far - -Finally, you can also plot a series of selected basis functions and there associated mesh elements (useful for detailed analysis and for debugging). Note tht you can also provide a 'Coefs' vector if you do not wish to use the default Coefs=1. value for representation. - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 100,102 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF1_Select.png - :height: 300px - :width: 500 px - :scale: 100 % - :alt: Some selected basis functions and their associated mesh centers and knots - :align: center - - Some selected basis functions and their associated mesh centers and knots - -All these functionalities are also found in the BaseFunc2D object, which additionally provides specific attributes and methods: - -.. list-table:: The attributes of a BaseFunc2D object - :widths: 10 40 - :header-rows: 1 - - * - Attribute - - Description - * - self.ID - - The ID class of the object - * - self.Mesh - - The Mesh2D object on which the basis functions are built - * - self.LFunc, self.NFunc, self.Deg, self.Bound - - The list of basis functions and the number of basis functions (self.NFunc=len(self.LFunc)), the degree of the basis functions, and the boundary condition (only 0 implemented so far, all points have 1 multiplicity) - * - self.Func_Centsind, self.Func_Knotsind, self.Func_PMax - - An array giving the correspondence index between each basis function and all its associated mesh centers (and there are methods to go the other way around), its associated mesh knots, and the position of the maximum of each basis function (either oa mesh center or a knot depending on its degree). - * - self.FuncInterFunc - - An array containing indices of all neighbouring basis functions of each basis function (neighbouring in the sense that the intersection of their respective supports is non-zero) - -Due to its 2D nature, the BaseFunc2D object class is also equiped with methods to get the support (self.get_SuppRZ) and quadrature points (self.get_quadPoints) of each basis function. - -Like the BaseFunc1D object, it provides a method for a least square fit of an input function. In the following example, the coefficients are determined using this method and then fed to various plotting methods used to visalise the function itself or some of its derivatives: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 106-139,142 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF2.png - :height: 300px - :width: 500 px - :scale: 100 % - :alt: Input 2D emissivity model and fitted BaseFunc2D - :align: center - - Input 2D emissivity model and fitted BaseFunc2D - - -.. 
figure:: figures_doc/Fig_Tutor_ToFuMesh_BF2_Deriv.png - :height: 800px - :width: 1200 px - :scale: 100 % - :alt: Series of derivatives or local quantities of interest of the fitted BaseFunc2D object - :align: center - - Series of derivatives or local quantities of interest of the fitted BaseFunc2D object - -As for the BaseFunc1D object, and in order to facilitate detailed analysis and possibly debugging, you can also plot the key points, support and value of some selected basis functions of your choice: - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 146,148,150 - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF2_Int1.png - :height: 400px - :width: 400 px - :scale: 100 % - :alt: Local values of the selected local basis functions, with the underlying mesh - :align: center - - Local values of the selected local basis functions, with the underlying mesh - -.. figure:: figures_doc/Fig_Tutor_ToFuMesh_BF2_Int2.png - :height: 400px - :width: 400 px - :scale: 100 % - :alt: Support and PMax of the selected local basis functions, with the underlying mesh and centers and knots associated to the selected local basis functions - :align: center - - Support and PMax of the selected local basis functions, with the underlying mesh and centers and knots associated to the selected local basis functions - -Finally, you can access values and operators of interest regarding some integrated quantities like the squared norm of the gradient, the squared Laplacian (to be finished)... - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 154-158 - -.. literalinclude:: ../../src/Tutorial_ToFu_Mesh.py - :language: python - :lines: 159-163 - -The following table lists the operators that are available in **ToFu_Mesh**, depending on the value of the kwdarg 'Deriv': - -.. list-table:: The available values of the 'Deriv' keyword argument for integral computation - :widths: 10 40 - :header-rows: 1 - - * - Value - - Description - * - 0 or 'D0' - - The integrals of the B-splines themselves (integrals of higher order derivatives are all zero) - * - 'D0N2', 'D1N2', 'D2N2', 'D3N2' - - The integrals of the squared norm of the 0th, 1st, 2nd and 3rd order derivatives (only 0-th order derivative implemented so far, for Deg=0,1, but not for Deg=2,3) - * - 'D1FI' - - The integrated Fisher Information, not implemented so far - - - -.. Local Variables: -.. mode: rst -.. End: diff --git a/_Old_doc/build/html/_sources/index.txt b/_Old_doc/build/html/_sources/index.txt deleted file mode 100644 index 16ce36a9c..000000000 --- a/_Old_doc/build/html/_sources/index.txt +++ /dev/null @@ -1,27 +0,0 @@ -.. ToFu documentation master file, created by - sphinx-quickstart on Fri Sep 26 19:30:14 2014. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to ToFu's documentation! -================================ - -Contents: - -.. toctree:: - :maxdepth: 2 - - overview - ToFu_Geom - ToFu_Mesh - ToFu_MatComp - ToFu_Inv - - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - diff --git a/_Old_doc/build/html/_sources/overview.txt b/_Old_doc/build/html/_sources/overview.txt deleted file mode 100644 index 5859d95ee..000000000 --- a/_Old_doc/build/html/_sources/overview.txt +++ /dev/null @@ -1,86 +0,0 @@ -.. role:: envvar(literal) -.. role:: command(literal) -.. role:: file(literal) -.. role:: ref(title-reference) -.. 
_overview: - -**Overview** -============ - -(This project is not finalised yet, work in progress...) - - -**ToFu**, which stands for "TOmography for FUsion", is a Python package (with parts in C/C++) providing all the necessary tools for tomography diagnostics for the fusion community; it is particularly relevant for X-ray and bolometer diagnostics on Tokamaks. One of the objectives is to provide a common tool for tomographic inversions, with both accurate methods and enough flexibility to be easily adapted to any Tokamak and to the specific requirements of each user. The main language (Python) has been chosen for its open-source philosophy, for its object-oriented capabilities, and for the good performance / flexibility ratio that it offers. The architecture of the **ToFu** package is intended to be modular, again to allow for maximum flexibility and to facilitate customisation and extension by its users. - -**ToFu** provides, in particular but not only, the following main functionalities: - - Using the 3D geometry of the diagnostic (positions of detectors and apertures are provided as inputs) to compute quantities of interest (e.g. the optimal line of sight, the exact etendue...). This is done by the module ToFu_Geom. - - Building a variable-grid-size mesh for spatial discretisation of the solution (i.e. the emissivity field), on which B-splines of any degree can be added to serve as Local Basis Functions. This is done by the module ToFu_Mesh. - - Computing the geometry matrix associated to a set of detectors and a set of basis functions, either with a full 3D approach or with a Line Of Sight (LOS) approximation. This is done by the module ToFu_MatComp, which uses both ToFu_Geom and ToFu_Mesh. - - Computing tomographic inversions based on the constructed geometry matrix and Phillips-Tikhonov regularisation, with a choice of objective functionals (among which first order and second order derivatives or Fisher information, and more to come). This is done by the module ToFu_Inv, which uses the matrix computed by ToFu_MatComp. - - Visualising, exploring and interpreting the resulting inversions using a built-in Graphical User Interface. - -The joint use of a full 3D approach and of regular basis functions (B-splines) allows for advanced functionalities and flexibility, in particular: - - Accurate computation of the etendue and of the geometry matrix. - - Exact differential operators (provided the basis functions are of sufficient degree) instead of discretised operators (this feature and the previous one aim at improving the accuracy of tomographic inversions). - - Accurate description of toroidal-viewing detectors with potentially large viewing cones, for which the LOS approximation cannot be used. - - Making 3D inversions possible (provided the geometrical coverage of the plasma volume is sufficient, for example thanks to toroidal-viewing detectors). - - Enabling anisotropic radiation (for example due to fast electrons generated during disruptions) to be properly taken into account. - -The **ToFu** package has built-in mesh and B-spline definitions; however, if used alone, it can only create and handle rectangular meshes (with variable grid size though). 
In order to allow for more optimised meshes and basis functions, the **ToFu** package is fully compatible with **Pigasus** (and **CAID**), which is another Python package (with a Fortran core) that uses cutting-edge technologies from Computer-Aided Design (CAD) to create optimised meshes (using Non-Uniform - Rational B-Splines, or NURBS, curves) on which it can also add several different types of regular basis functions. It is a next-gen solution for the optimisation of plasma-physics simulation codes. Hence, the final idea is that the same mesh and tools can be used for running CPU-expensive plasma physics simulations and, from their output, for computing the associated simulated measurements on any radiation diagnostic. This synthetic diagnostic approach is aimed at facilitating direct - comparisons between simulations and experimental measurements and at providing the community with flexible and cross-compatible tools to fit their needs. Plasma physics codes that are planning on using **Pigasus** in the near future include in particular **JOREK** (in its **Django** version) and **GISELA** (**CELALIB** in its next version). More information about **Pigasus** (link), **JOREK** (link) and **GISELA** can be found on their respective pages. - -In order to avoid too many dependency issues, the **ToFu** package resorts to widely used Python libraries like scipy, numpy and matplotlib. Whenever possible, the idea was either to use a very common and accessible library or to have built-in methods doing the job. It can be run as a stand-alone on an offline computer (e.g. on a laptop while travelling), in an online mode (using a central database on the internet), and with or without **Pigasus** (keeping in mind that only rectangular meshes can be created without it). - -For faster computation, some modules and/or methods are coded with Cython or Boost.Python. It is also intended to be parallelised with MPI and OpenMP. - -The general architecture is briefly represented in the following figure: - -.. figure:: /figures_doc/Fig_Tutor_BigPicture_General.png - :height: 700px - :width: 1000px - :scale: 100 % - :alt: ToFu big picture - :align: center - - Modular architecture of ToFu, with its main modules. - -This general overview shows all the **ToFu** modules and their main functionalities and dependencies. Particularly important are the modules **ToFu_Geom**, **ToFu_Mesh** and **ToFu_MatComp**, which provide all the tools needed to pre-compute the geometry matrix, a key feature of the two main uses of **ToFu**. - -On the one hand, **ToFu** can be used as a synthetic diagnostic since, from a simulated emissivity field, it can compute the corresponding synthetic measurements for comparison with experimental measurements. This, as illustrated below, can be done in different ways depending on whether the simulated emissivity is used directly as a function, projected on a predefined mesh of the plasma volume, or computed on a mesh using the **Pigasus/CAID** code suite, which is directly compatible with **ToFu**. These three possibilities are illustrated in the following figure: - -.. figure:: /figures_doc/Fig_Tutor_BigPicture_SynthDiag.png - :height: 700px - :width: 1000px - :scale: 100 % - :alt: ToFu big picture for synthetic diagnostics - :align: center - - Modular architecture of ToFu, with its main modules for synthetic diagnostics.
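To make these two complementary uses more concrete, here is a minimal NumPy sketch (not the actual **ToFu** API: the names G, c_true, D and lam, the array sizes and the random stand-in for the geometry matrix are purely illustrative) showing the synthetic-diagnostic step as a matrix-vector product and the tomographic step as a regularised least-squares (Phillips-Tikhonov-like) inversion:

.. code-block:: python

    import numpy as np

    rng = np.random.default_rng(0)
    Ndet, Nbf = 30, 100                        # detectors / basis functions (arbitrary sizes)

    # Stand-in for the pre-computed geometry matrix (in ToFu it would come from ToFu_MatComp)
    G = np.abs(rng.normal(size=(Ndet, Nbf)))

    # Basis-function coefficients describing a model emissivity field
    c_true = np.exp(-0.5 * ((np.arange(Nbf) - 50.0) / 10.0) ** 2)

    # Synthetic diagnostic: simulated measurements from the simulated emissivity
    s = G @ c_true

    # Tomography: recover coefficients from the measurements, using a first-order
    # difference operator as a simple regularising objective functional
    D = np.diff(np.eye(Nbf), axis=0)
    lam = 1e-3
    c_rec = np.linalg.solve(G.T @ G + lam * D.T @ D, G.T @ s)

In both directions the central object is the same geometry matrix, which is why pre-computing it with ToFu_Geom, ToFu_Mesh and ToFu_MatComp is presented as the key step for the two main uses of **ToFu**.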
- -On the other hand, **ToFu** can be used the other way around: using the experimental measurements to compute a reconstructed experimental emissivity field via a tomographic inversion, for comparison with a simulated emissivity field or simply to get an idea of what the emissivity field looks like, as illustrated in the following figure: - -.. figure:: /figures_doc/Fig_Tutor_BigPicture_Tomo.png - :height: 700px - :width: 1000px - :scale: 100 % - :alt: ToFu big picture for tomography - :align: center - - Modular architecture of ToFu, with its main modules for tomography. - -The following sections go into further detail regarding each module. - - -ToDo list: - - Rest of documentation, with relevant references (like :cite:`Ingesson08FST`) and figures - - Tutorial - - ToFu_Inv - - GUI (one for each module) - - Accelerate existing modules with Cython, Boost.Python + Parallelization - - Use it to do some physics at last!!! - - -.. Local Variables: -.. mode: rst -.. End: diff --git a/_Old_doc/build/html/_static/ajax-loader.gif b/_Old_doc/build/html/_static/ajax-loader.gif deleted file mode 100644 index 61faf8cab..000000000 Binary files a/_Old_doc/build/html/_static/ajax-loader.gif and /dev/null differ diff --git a/_Old_doc/build/html/_static/basic.css b/_Old_doc/build/html/_static/basic.css deleted file mode 100644 index 43e8bafaf..000000000 --- a/_Old_doc/build/html/_static/basic.css +++ /dev/null @@ -1,540 +0,0 @@ -/* - * basic.css - * ~~~~~~~~~ - * - * Sphinx stylesheet -- basic theme. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -/* -- main layout ----------------------------------------------------------- */ - -div.clearer { - clear: both; -} - -/* -- relbar ---------------------------------------------------------------- */ - -div.related { - width: 100%; - font-size: 90%; -} - -div.related h3 { - display: none; -} - -div.related ul { - margin: 0; - padding: 0 0 0 10px; - list-style: none; -} - -div.related li { - display: inline; -} - -div.related li.right { - float: right; - margin-right: 5px; -} - -/* -- sidebar --------------------------------------------------------------- */ - -div.sphinxsidebarwrapper { - padding: 10px 5px 0 10px; -} - -div.sphinxsidebar { - float: left; - width: 230px; - margin-left: -100%; - font-size: 90%; -} - -div.sphinxsidebar ul { - list-style: none; -} - -div.sphinxsidebar ul ul, -div.sphinxsidebar ul.want-points { - margin-left: 20px; - list-style: square; -} - -div.sphinxsidebar ul ul { - margin-top: 0; - margin-bottom: 0; -} - -div.sphinxsidebar form { - margin-top: 10px; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - -div.sphinxsidebar #searchbox input[type="text"] { - width: 170px; -} - -div.sphinxsidebar #searchbox input[type="submit"] { - width: 30px; -} - -img { - border: 0; -} - -/* -- search page ----------------------------------------------------------- */ - -ul.search { - margin: 10px 0 0 20px; - padding: 0; -} - -ul.search li { - padding: 5px 0 5px 20px; - background-image: url(file.png); - background-repeat: no-repeat; - background-position: 0 7px; -} - -ul.search li a { - font-weight: bold; -} - -ul.search li div.context { - color: #888; - margin: 2px 0 0 30px; - text-align: left; -} - -ul.keywordmatches li.goodmatch a { - font-weight: bold; -} - -/* -- index page ------------------------------------------------------------ */ - -table.contentstable { - width: 90%; -} - -table.contentstable 
p.biglink { - line-height: 150%; -} - -a.biglink { - font-size: 1.3em; -} - -span.linkdescr { - font-style: italic; - padding-top: 5px; - font-size: 90%; -} - -/* -- general index --------------------------------------------------------- */ - -table.indextable { - width: 100%; -} - -table.indextable td { - text-align: left; - vertical-align: top; -} - -table.indextable dl, table.indextable dd { - margin-top: 0; - margin-bottom: 0; -} - -table.indextable tr.pcap { - height: 10px; -} - -table.indextable tr.cap { - margin-top: 10px; - background-color: #f2f2f2; -} - -img.toggler { - margin-right: 3px; - margin-top: 3px; - cursor: pointer; -} - -div.modindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -div.genindex-jumpbox { - border-top: 1px solid #ddd; - border-bottom: 1px solid #ddd; - margin: 1em 0 1em 0; - padding: 0.4em; -} - -/* -- general body styles --------------------------------------------------- */ - -a.headerlink { - visibility: hidden; -} - -h1:hover > a.headerlink, -h2:hover > a.headerlink, -h3:hover > a.headerlink, -h4:hover > a.headerlink, -h5:hover > a.headerlink, -h6:hover > a.headerlink, -dt:hover > a.headerlink { - visibility: visible; -} - -div.body p.caption { - text-align: inherit; -} - -div.body td { - text-align: left; -} - -.field-list ul { - padding-left: 1em; -} - -.first { - margin-top: 0 !important; -} - -p.rubric { - margin-top: 30px; - font-weight: bold; -} - -img.align-left, .figure.align-left, object.align-left { - clear: left; - float: left; - margin-right: 1em; -} - -img.align-right, .figure.align-right, object.align-right { - clear: right; - float: right; - margin-left: 1em; -} - -img.align-center, .figure.align-center, object.align-center { - display: block; - margin-left: auto; - margin-right: auto; -} - -.align-left { - text-align: left; -} - -.align-center { - text-align: center; -} - -.align-right { - text-align: right; -} - -/* -- sidebars -------------------------------------------------------------- */ - -div.sidebar { - margin: 0 0 0.5em 1em; - border: 1px solid #ddb; - padding: 7px 7px 0 7px; - background-color: #ffe; - width: 40%; - float: right; -} - -p.sidebar-title { - font-weight: bold; -} - -/* -- topics ---------------------------------------------------------------- */ - -div.topic { - border: 1px solid #ccc; - padding: 7px 7px 0 7px; - margin: 10px 0 10px 0; -} - -p.topic-title { - font-size: 1.1em; - font-weight: bold; - margin-top: 10px; -} - -/* -- admonitions ----------------------------------------------------------- */ - -div.admonition { - margin-top: 10px; - margin-bottom: 10px; - padding: 7px; -} - -div.admonition dt { - font-weight: bold; -} - -div.admonition dl { - margin-bottom: 0; -} - -p.admonition-title { - margin: 0px 10px 5px 0px; - font-weight: bold; -} - -div.body p.centered { - text-align: center; - margin-top: 25px; -} - -/* -- tables ---------------------------------------------------------------- */ - -table.docutils { - border: 0; - border-collapse: collapse; -} - -table.docutils td, table.docutils th { - padding: 1px 8px 1px 5px; - border-top: 0; - border-left: 0; - border-right: 0; - border-bottom: 1px solid #aaa; -} - -table.field-list td, table.field-list th { - border: 0 !important; -} - -table.footnote td, table.footnote th { - border: 0 !important; -} - -th { - text-align: left; - padding-right: 5px; -} - -table.citation { - border-left: solid 1px gray; - margin-left: 1px; -} - -table.citation td { - border-bottom: none; -} - -/* 
-- other body styles ----------------------------------------------------- */ - -ol.arabic { - list-style: decimal; -} - -ol.loweralpha { - list-style: lower-alpha; -} - -ol.upperalpha { - list-style: upper-alpha; -} - -ol.lowerroman { - list-style: lower-roman; -} - -ol.upperroman { - list-style: upper-roman; -} - -dl { - margin-bottom: 15px; -} - -dd p { - margin-top: 0px; -} - -dd ul, dd table { - margin-bottom: 10px; -} - -dd { - margin-top: 3px; - margin-bottom: 10px; - margin-left: 30px; -} - -dt:target, .highlighted { - background-color: #fbe54e; -} - -dl.glossary dt { - font-weight: bold; - font-size: 1.1em; -} - -.field-list ul { - margin: 0; - padding-left: 1em; -} - -.field-list p { - margin: 0; -} - -.refcount { - color: #060; -} - -.optional { - font-size: 1.3em; -} - -.versionmodified { - font-style: italic; -} - -.system-message { - background-color: #fda; - padding: 5px; - border: 3px solid red; -} - -.footnote:target { - background-color: #ffa; -} - -.line-block { - display: block; - margin-top: 1em; - margin-bottom: 1em; -} - -.line-block .line-block { - margin-top: 0; - margin-bottom: 0; - margin-left: 1.5em; -} - -.guilabel, .menuselection { - font-family: sans-serif; -} - -.accelerator { - text-decoration: underline; -} - -.classifier { - font-style: oblique; -} - -abbr, acronym { - border-bottom: dotted 1px; - cursor: help; -} - -/* -- code displays --------------------------------------------------------- */ - -pre { - overflow: auto; - overflow-y: hidden; /* fixes display issues on Chrome browsers */ -} - -td.linenos pre { - padding: 5px 0px; - border: 0; - background-color: transparent; - color: #aaa; -} - -table.highlighttable { - margin-left: 0.5em; -} - -table.highlighttable td { - padding: 0 0.5em 0 0.5em; -} - -tt.descname { - background-color: transparent; - font-weight: bold; - font-size: 1.2em; -} - -tt.descclassname { - background-color: transparent; -} - -tt.xref, a tt { - background-color: transparent; - font-weight: bold; -} - -h1 tt, h2 tt, h3 tt, h4 tt, h5 tt, h6 tt { - background-color: transparent; -} - -.viewcode-link { - float: right; -} - -.viewcode-back { - float: right; - font-family: sans-serif; -} - -div.viewcode-block:target { - margin: -1px -10px; - padding: 0 10px; -} - -/* -- math display ---------------------------------------------------------- */ - -img.math { - vertical-align: middle; -} - -div.body div.math p { - text-align: center; -} - -span.eqno { - float: right; -} - -/* -- printout stylesheet --------------------------------------------------- */ - -@media print { - div.document, - div.documentwrapper, - div.bodywrapper { - margin: 0 !important; - width: 100%; - } - - div.sphinxsidebar, - div.related, - div.footer, - #top-link { - display: none; - } -} \ No newline at end of file diff --git a/_Old_doc/build/html/_static/comment-bright.png b/_Old_doc/build/html/_static/comment-bright.png deleted file mode 100644 index 551517b8c..000000000 Binary files a/_Old_doc/build/html/_static/comment-bright.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/comment-close.png b/_Old_doc/build/html/_static/comment-close.png deleted file mode 100644 index 09b54be46..000000000 Binary files a/_Old_doc/build/html/_static/comment-close.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/comment.png b/_Old_doc/build/html/_static/comment.png deleted file mode 100644 index 92feb52b8..000000000 Binary files a/_Old_doc/build/html/_static/comment.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/default.css 
b/_Old_doc/build/html/_static/default.css deleted file mode 100644 index 21f3f5098..000000000 --- a/_Old_doc/build/html/_static/default.css +++ /dev/null @@ -1,256 +0,0 @@ -/* - * default.css_t - * ~~~~~~~~~~~~~ - * - * Sphinx stylesheet -- default theme. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -@import url("basic.css"); - -/* -- page layout ----------------------------------------------------------- */ - -body { - font-family: sans-serif; - font-size: 100%; - background-color: #11303d; - color: #000; - margin: 0; - padding: 0; -} - -div.document { - background-color: #1c4e63; -} - -div.documentwrapper { - float: left; - width: 100%; -} - -div.bodywrapper { - margin: 0 0 0 230px; -} - -div.body { - background-color: #ffffff; - color: #000000; - padding: 0 20px 30px 20px; -} - -div.footer { - color: #ffffff; - width: 100%; - padding: 9px 0 9px 0; - text-align: center; - font-size: 75%; -} - -div.footer a { - color: #ffffff; - text-decoration: underline; -} - -div.related { - background-color: #133f52; - line-height: 30px; - color: #ffffff; -} - -div.related a { - color: #ffffff; -} - -div.sphinxsidebar { -} - -div.sphinxsidebar h3 { - font-family: 'Trebuchet MS', sans-serif; - color: #ffffff; - font-size: 1.4em; - font-weight: normal; - margin: 0; - padding: 0; -} - -div.sphinxsidebar h3 a { - color: #ffffff; -} - -div.sphinxsidebar h4 { - font-family: 'Trebuchet MS', sans-serif; - color: #ffffff; - font-size: 1.3em; - font-weight: normal; - margin: 5px 0 0 0; - padding: 0; -} - -div.sphinxsidebar p { - color: #ffffff; -} - -div.sphinxsidebar p.topless { - margin: 5px 10px 10px 10px; -} - -div.sphinxsidebar ul { - margin: 10px; - padding: 0; - color: #ffffff; -} - -div.sphinxsidebar a { - color: #98dbcc; -} - -div.sphinxsidebar input { - border: 1px solid #98dbcc; - font-family: sans-serif; - font-size: 1em; -} - - - -/* -- hyperlink styles ------------------------------------------------------ */ - -a { - color: #355f7c; - text-decoration: none; -} - -a:visited { - color: #355f7c; - text-decoration: none; -} - -a:hover { - text-decoration: underline; -} - - - -/* -- body styles ----------------------------------------------------------- */ - -div.body h1, -div.body h2, -div.body h3, -div.body h4, -div.body h5, -div.body h6 { - font-family: 'Trebuchet MS', sans-serif; - background-color: #f2f2f2; - font-weight: normal; - color: #20435c; - border-bottom: 1px solid #ccc; - margin: 20px -20px 10px -20px; - padding: 3px 0 3px 10px; -} - -div.body h1 { margin-top: 0; font-size: 200%; } -div.body h2 { font-size: 160%; } -div.body h3 { font-size: 140%; } -div.body h4 { font-size: 120%; } -div.body h5 { font-size: 110%; } -div.body h6 { font-size: 100%; } - -a.headerlink { - color: #c60f0f; - font-size: 0.8em; - padding: 0 4px 0 4px; - text-decoration: none; -} - -a.headerlink:hover { - background-color: #c60f0f; - color: white; -} - -div.body p, div.body dd, div.body li { - text-align: justify; - line-height: 130%; -} - -div.admonition p.admonition-title + p { - display: inline; -} - -div.admonition p { - margin-bottom: 5px; -} - -div.admonition pre { - margin-bottom: 5px; -} - -div.admonition ul, div.admonition ol { - margin-bottom: 5px; -} - -div.note { - background-color: #eee; - border: 1px solid #ccc; -} - -div.seealso { - background-color: #ffc; - border: 1px solid #ff6; -} - -div.topic { - background-color: #eee; -} - -div.warning { - background-color: #ffe4e4; - border: 1px solid #f66; -} - 
-p.admonition-title { - display: inline; -} - -p.admonition-title:after { - content: ":"; -} - -pre { - padding: 5px; - background-color: #eeffcc; - color: #333333; - line-height: 120%; - border: 1px solid #ac9; - border-left: none; - border-right: none; -} - -tt { - background-color: #ecf0f3; - padding: 0 1px 0 1px; - font-size: 0.95em; -} - -th { - background-color: #ede; -} - -.warning tt { - background: #efc2c2; -} - -.note tt { - background: #d6d6d6; -} - -.viewcode-back { - font-family: sans-serif; -} - -div.viewcode-block:target { - background-color: #f4debf; - border-top: 1px solid #ac9; - border-bottom: 1px solid #ac9; -} \ No newline at end of file diff --git a/_Old_doc/build/html/_static/doctools.js b/_Old_doc/build/html/_static/doctools.js deleted file mode 100644 index d4619fdfb..000000000 --- a/_Old_doc/build/html/_static/doctools.js +++ /dev/null @@ -1,247 +0,0 @@ -/* - * doctools.js - * ~~~~~~~~~~~ - * - * Sphinx JavaScript utilities for all documentation. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -/** - * select a different prefix for underscore - */ -$u = _.noConflict(); - -/** - * make the code below compatible with browsers without - * an installed firebug like debugger -if (!window.console || !console.firebug) { - var names = ["log", "debug", "info", "warn", "error", "assert", "dir", - "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", - "profile", "profileEnd"]; - window.console = {}; - for (var i = 0; i < names.length; ++i) - window.console[names[i]] = function() {}; -} - */ - -/** - * small helper function to urldecode strings - */ -jQuery.urldecode = function(x) { - return decodeURIComponent(x).replace(/\+/g, ' '); -} - -/** - * small helper function to urlencode strings - */ -jQuery.urlencode = encodeURIComponent; - -/** - * This function returns the parsed url parameters of the - * current request. Multiple values per key are supported, - * it will always return arrays of strings for the value parts. - */ -jQuery.getQueryParameters = function(s) { - if (typeof s == 'undefined') - s = document.location.search; - var parts = s.substr(s.indexOf('?') + 1).split('&'); - var result = {}; - for (var i = 0; i < parts.length; i++) { - var tmp = parts[i].split('=', 2); - var key = jQuery.urldecode(tmp[0]); - var value = jQuery.urldecode(tmp[1]); - if (key in result) - result[key].push(value); - else - result[key] = [value]; - } - return result; -}; - -/** - * small function to check if an array contains - * a given item. - */ -jQuery.contains = function(arr, item) { - for (var i = 0; i < arr.length; i++) { - if (arr[i] == item) - return true; - } - return false; -}; - -/** - * highlight a given string on a jquery object by wrapping it in - * span elements with the given class name. 
- */ -jQuery.fn.highlightText = function(text, className) { - function highlight(node) { - if (node.nodeType == 3) { - var val = node.nodeValue; - var pos = val.toLowerCase().indexOf(text); - if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { - var span = document.createElement("span"); - span.className = className; - span.appendChild(document.createTextNode(val.substr(pos, text.length))); - node.parentNode.insertBefore(span, node.parentNode.insertBefore( - document.createTextNode(val.substr(pos + text.length)), - node.nextSibling)); - node.nodeValue = val.substr(0, pos); - } - } - else if (!jQuery(node).is("button, select, textarea")) { - jQuery.each(node.childNodes, function() { - highlight(this); - }); - } - } - return this.each(function() { - highlight(this); - }); -}; - -/** - * Small JavaScript module for the documentation. - */ -var Documentation = { - - init : function() { - this.fixFirefoxAnchorBug(); - this.highlightSearchWords(); - this.initIndexTable(); - }, - - /** - * i18n support - */ - TRANSLATIONS : {}, - PLURAL_EXPR : function(n) { return n == 1 ? 0 : 1; }, - LOCALE : 'unknown', - - // gettext and ngettext don't access this so that the functions - // can safely bound to a different name (_ = Documentation.gettext) - gettext : function(string) { - var translated = Documentation.TRANSLATIONS[string]; - if (typeof translated == 'undefined') - return string; - return (typeof translated == 'string') ? translated : translated[0]; - }, - - ngettext : function(singular, plural, n) { - var translated = Documentation.TRANSLATIONS[singular]; - if (typeof translated == 'undefined') - return (n == 1) ? singular : plural; - return translated[Documentation.PLURALEXPR(n)]; - }, - - addTranslations : function(catalog) { - for (var key in catalog.messages) - this.TRANSLATIONS[key] = catalog.messages[key]; - this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); - this.LOCALE = catalog.locale; - }, - - /** - * add context elements like header anchor links - */ - addContextElements : function() { - $('div[id] > :header:first').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this headline')). - appendTo(this); - }); - $('dt[id]').each(function() { - $('\u00B6'). - attr('href', '#' + this.id). - attr('title', _('Permalink to this definition')). - appendTo(this); - }); - }, - - /** - * workaround a firefox stupidity - */ - fixFirefoxAnchorBug : function() { - if (document.location.hash && $.browser.mozilla) - window.setTimeout(function() { - document.location.href += ''; - }, 10); - }, - - /** - * highlight the search words provided in the url in the text - */ - highlightSearchWords : function() { - var params = $.getQueryParameters(); - var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; - if (terms.length) { - var body = $('div.body'); - window.setTimeout(function() { - $.each(terms, function() { - body.highlightText(this.toLowerCase(), 'highlighted'); - }); - }, 10); - $('') - .appendTo($('#searchbox')); - } - }, - - /** - * init the domain index toggle buttons - */ - initIndexTable : function() { - var togglers = $('img.toggler').click(function() { - var src = $(this).attr('src'); - var idnum = $(this).attr('id').substr(7); - $('tr.cg-' + idnum).toggle(); - if (src.substr(-9) == 'minus.png') - $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); - else - $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); - }).css('display', ''); - if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { - togglers.click(); - } - }, - - /** - * helper function to hide the search marks again - */ - hideSearchWords : function() { - $('#searchbox .highlight-link').fadeOut(300); - $('span.highlighted').removeClass('highlighted'); - }, - - /** - * make the url absolute - */ - makeURL : function(relativeURL) { - return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; - }, - - /** - * get the current relative url - */ - getCurrentURL : function() { - var path = document.location.pathname; - var parts = path.split(/\//); - $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { - if (this == '..') - parts.pop(); - }); - var url = parts.join('/'); - return path.substring(url.lastIndexOf('/') + 1, path.length - 1); - } -}; - -// quick alias for translations -_ = Documentation.gettext; - -$(document).ready(function() { - Documentation.init(); -}); diff --git a/_Old_doc/build/html/_static/down-pressed.png b/_Old_doc/build/html/_static/down-pressed.png deleted file mode 100644 index 6f7ad7827..000000000 Binary files a/_Old_doc/build/html/_static/down-pressed.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/down.png b/_Old_doc/build/html/_static/down.png deleted file mode 100644 index 3003a8877..000000000 Binary files a/_Old_doc/build/html/_static/down.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/file.png b/_Old_doc/build/html/_static/file.png deleted file mode 100644 index d18082e39..000000000 Binary files a/_Old_doc/build/html/_static/file.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/jquery.js b/_Old_doc/build/html/_static/jquery.js deleted file mode 100644 index 7c2430802..000000000 --- a/_Old_doc/build/html/_static/jquery.js +++ /dev/null @@ -1,154 +0,0 @@ -/*! - * jQuery JavaScript Library v1.4.2 - * http://jquery.com/ - * - * Copyright 2010, John Resig - * Dual licensed under the MIT or GPL Version 2 licenses. - * http://jquery.org/license - * - * Includes Sizzle.js - * http://sizzlejs.com/ - * Copyright 2010, The Dojo Foundation - * Released under the MIT, BSD, and GPL Licenses. 
- * - * Date: Sat Feb 13 22:33:48 2010 -0500 - */ -(function(A,w){function ma(){if(!c.isReady){try{s.documentElement.doScroll("left")}catch(a){setTimeout(ma,1);return}c.ready()}}function Qa(a,b){b.src?c.ajax({url:b.src,async:false,dataType:"script"}):c.globalEval(b.text||b.textContent||b.innerHTML||"");b.parentNode&&b.parentNode.removeChild(b)}function X(a,b,d,f,e,j){var i=a.length;if(typeof b==="object"){for(var o in b)X(a,o,b[o],f,e,d);return a}if(d!==w){f=!j&&f&&c.isFunction(d);for(o=0;o)[^>]*$|^#([\w-]+)$/,Ua=/^.[^:#\[\.,]*$/,Va=/\S/, -Wa=/^(\s|\u00A0)+|(\s|\u00A0)+$/g,Xa=/^<(\w+)\s*\/?>(?:<\/\1>)?$/,P=navigator.userAgent,xa=false,Q=[],L,$=Object.prototype.toString,aa=Object.prototype.hasOwnProperty,ba=Array.prototype.push,R=Array.prototype.slice,ya=Array.prototype.indexOf;c.fn=c.prototype={init:function(a,b){var d,f;if(!a)return this;if(a.nodeType){this.context=this[0]=a;this.length=1;return this}if(a==="body"&&!b){this.context=s;this[0]=s.body;this.selector="body";this.length=1;return this}if(typeof a==="string")if((d=Ta.exec(a))&& -(d[1]||!b))if(d[1]){f=b?b.ownerDocument||b:s;if(a=Xa.exec(a))if(c.isPlainObject(b)){a=[s.createElement(a[1])];c.fn.attr.call(a,b,true)}else a=[f.createElement(a[1])];else{a=sa([d[1]],[f]);a=(a.cacheable?a.fragment.cloneNode(true):a.fragment).childNodes}return c.merge(this,a)}else{if(b=s.getElementById(d[2])){if(b.id!==d[2])return T.find(a);this.length=1;this[0]=b}this.context=s;this.selector=a;return this}else if(!b&&/^\w+$/.test(a)){this.selector=a;this.context=s;a=s.getElementsByTagName(a);return c.merge(this, -a)}else return!b||b.jquery?(b||T).find(a):c(b).find(a);else if(c.isFunction(a))return T.ready(a);if(a.selector!==w){this.selector=a.selector;this.context=a.context}return c.makeArray(a,this)},selector:"",jquery:"1.4.2",length:0,size:function(){return this.length},toArray:function(){return R.call(this,0)},get:function(a){return a==null?this.toArray():a<0?this.slice(a)[0]:this[a]},pushStack:function(a,b,d){var f=c();c.isArray(a)?ba.apply(f,a):c.merge(f,a);f.prevObject=this;f.context=this.context;if(b=== -"find")f.selector=this.selector+(this.selector?" ":"")+d;else if(b)f.selector=this.selector+"."+b+"("+d+")";return f},each:function(a,b){return c.each(this,a,b)},ready:function(a){c.bindReady();if(c.isReady)a.call(s,c);else Q&&Q.push(a);return this},eq:function(a){return a===-1?this.slice(a):this.slice(a,+a+1)},first:function(){return this.eq(0)},last:function(){return this.eq(-1)},slice:function(){return this.pushStack(R.apply(this,arguments),"slice",R.call(arguments).join(","))},map:function(a){return this.pushStack(c.map(this, -function(b,d){return a.call(b,d,b)}))},end:function(){return this.prevObject||c(null)},push:ba,sort:[].sort,splice:[].splice};c.fn.init.prototype=c.fn;c.extend=c.fn.extend=function(){var a=arguments[0]||{},b=1,d=arguments.length,f=false,e,j,i,o;if(typeof a==="boolean"){f=a;a=arguments[1]||{};b=2}if(typeof a!=="object"&&!c.isFunction(a))a={};if(d===b){a=this;--b}for(;b
a"; -var e=d.getElementsByTagName("*"),j=d.getElementsByTagName("a")[0];if(!(!e||!e.length||!j)){c.support={leadingWhitespace:d.firstChild.nodeType===3,tbody:!d.getElementsByTagName("tbody").length,htmlSerialize:!!d.getElementsByTagName("link").length,style:/red/.test(j.getAttribute("style")),hrefNormalized:j.getAttribute("href")==="/a",opacity:/^0.55$/.test(j.style.opacity),cssFloat:!!j.style.cssFloat,checkOn:d.getElementsByTagName("input")[0].value==="on",optSelected:s.createElement("select").appendChild(s.createElement("option")).selected, -parentNode:d.removeChild(d.appendChild(s.createElement("div"))).parentNode===null,deleteExpando:true,checkClone:false,scriptEval:false,noCloneEvent:true,boxModel:null};b.type="text/javascript";try{b.appendChild(s.createTextNode("window."+f+"=1;"))}catch(i){}a.insertBefore(b,a.firstChild);if(A[f]){c.support.scriptEval=true;delete A[f]}try{delete b.test}catch(o){c.support.deleteExpando=false}a.removeChild(b);if(d.attachEvent&&d.fireEvent){d.attachEvent("onclick",function k(){c.support.noCloneEvent= -false;d.detachEvent("onclick",k)});d.cloneNode(true).fireEvent("onclick")}d=s.createElement("div");d.innerHTML="";a=s.createDocumentFragment();a.appendChild(d.firstChild);c.support.checkClone=a.cloneNode(true).cloneNode(true).lastChild.checked;c(function(){var k=s.createElement("div");k.style.width=k.style.paddingLeft="1px";s.body.appendChild(k);c.boxModel=c.support.boxModel=k.offsetWidth===2;s.body.removeChild(k).style.display="none"});a=function(k){var n= -s.createElement("div");k="on"+k;var r=k in n;if(!r){n.setAttribute(k,"return;");r=typeof n[k]==="function"}return r};c.support.submitBubbles=a("submit");c.support.changeBubbles=a("change");a=b=d=e=j=null}})();c.props={"for":"htmlFor","class":"className",readonly:"readOnly",maxlength:"maxLength",cellspacing:"cellSpacing",rowspan:"rowSpan",colspan:"colSpan",tabindex:"tabIndex",usemap:"useMap",frameborder:"frameBorder"};var G="jQuery"+J(),Ya=0,za={};c.extend({cache:{},expando:G,noData:{embed:true,object:true, -applet:true},data:function(a,b,d){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var f=a[G],e=c.cache;if(!f&&typeof b==="string"&&d===w)return null;f||(f=++Ya);if(typeof b==="object"){a[G]=f;e[f]=c.extend(true,{},b)}else if(!e[f]){a[G]=f;e[f]={}}a=e[f];if(d!==w)a[b]=d;return typeof b==="string"?a[b]:a}},removeData:function(a,b){if(!(a.nodeName&&c.noData[a.nodeName.toLowerCase()])){a=a==A?za:a;var d=a[G],f=c.cache,e=f[d];if(b){if(e){delete e[b];c.isEmptyObject(e)&&c.removeData(a)}}else{if(c.support.deleteExpando)delete a[c.expando]; -else a.removeAttribute&&a.removeAttribute(c.expando);delete f[d]}}}});c.fn.extend({data:function(a,b){if(typeof a==="undefined"&&this.length)return c.data(this[0]);else if(typeof a==="object")return this.each(function(){c.data(this,a)});var d=a.split(".");d[1]=d[1]?"."+d[1]:"";if(b===w){var f=this.triggerHandler("getData"+d[1]+"!",[d[0]]);if(f===w&&this.length)f=c.data(this[0],a);return f===w&&d[1]?this.data(d[0]):f}else return this.trigger("setData"+d[1]+"!",[d[0],b]).each(function(){c.data(this, -a,b)})},removeData:function(a){return this.each(function(){c.removeData(this,a)})}});c.extend({queue:function(a,b,d){if(a){b=(b||"fx")+"queue";var f=c.data(a,b);if(!d)return f||[];if(!f||c.isArray(d))f=c.data(a,b,c.makeArray(d));else f.push(d);return f}},dequeue:function(a,b){b=b||"fx";var 
d=c.queue(a,b),f=d.shift();if(f==="inprogress")f=d.shift();if(f){b==="fx"&&d.unshift("inprogress");f.call(a,function(){c.dequeue(a,b)})}}});c.fn.extend({queue:function(a,b){if(typeof a!=="string"){b=a;a="fx"}if(b=== -w)return c.queue(this[0],a);return this.each(function(){var d=c.queue(this,a,b);a==="fx"&&d[0]!=="inprogress"&&c.dequeue(this,a)})},dequeue:function(a){return this.each(function(){c.dequeue(this,a)})},delay:function(a,b){a=c.fx?c.fx.speeds[a]||a:a;b=b||"fx";return this.queue(b,function(){var d=this;setTimeout(function(){c.dequeue(d,b)},a)})},clearQueue:function(a){return this.queue(a||"fx",[])}});var Aa=/[\n\t]/g,ca=/\s+/,Za=/\r/g,$a=/href|src|style/,ab=/(button|input)/i,bb=/(button|input|object|select|textarea)/i, -cb=/^(a|area)$/i,Ba=/radio|checkbox/;c.fn.extend({attr:function(a,b){return X(this,a,b,true,c.attr)},removeAttr:function(a){return this.each(function(){c.attr(this,a,"");this.nodeType===1&&this.removeAttribute(a)})},addClass:function(a){if(c.isFunction(a))return this.each(function(n){var r=c(this);r.addClass(a.call(this,n,r.attr("class")))});if(a&&typeof a==="string")for(var b=(a||"").split(ca),d=0,f=this.length;d-1)return true;return false},val:function(a){if(a===w){var b=this[0];if(b){if(c.nodeName(b,"option"))return(b.attributes.value||{}).specified?b.value:b.text;if(c.nodeName(b,"select")){var d=b.selectedIndex,f=[],e=b.options;b=b.type==="select-one";if(d<0)return null;var j=b?d:0;for(d=b?d+1:e.length;j=0;else if(c.nodeName(this,"select")){var u=c.makeArray(r);c("option",this).each(function(){this.selected= -c.inArray(c(this).val(),u)>=0});if(!u.length)this.selectedIndex=-1}else this.value=r}})}});c.extend({attrFn:{val:true,css:true,html:true,text:true,data:true,width:true,height:true,offset:true},attr:function(a,b,d,f){if(!a||a.nodeType===3||a.nodeType===8)return w;if(f&&b in c.attrFn)return c(a)[b](d);f=a.nodeType!==1||!c.isXMLDoc(a);var e=d!==w;b=f&&c.props[b]||b;if(a.nodeType===1){var j=$a.test(b);if(b in a&&f&&!j){if(e){b==="type"&&ab.test(a.nodeName)&&a.parentNode&&c.error("type property can't be changed"); -a[b]=d}if(c.nodeName(a,"form")&&a.getAttributeNode(b))return a.getAttributeNode(b).nodeValue;if(b==="tabIndex")return(b=a.getAttributeNode("tabIndex"))&&b.specified?b.value:bb.test(a.nodeName)||cb.test(a.nodeName)&&a.href?0:w;return a[b]}if(!c.support.style&&f&&b==="style"){if(e)a.style.cssText=""+d;return a.style.cssText}e&&a.setAttribute(b,""+d);a=!c.support.hrefNormalized&&f&&j?a.getAttribute(b,2):a.getAttribute(b);return a===null?w:a}return c.style(a,b,d)}});var O=/\.(.*)$/,db=function(a){return a.replace(/[^\w\s\.\|`]/g, -function(b){return"\\"+b})};c.event={add:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){if(a.setInterval&&a!==A&&!a.frameElement)a=A;var e,j;if(d.handler){e=d;d=e.handler}if(!d.guid)d.guid=c.guid++;if(j=c.data(a)){var i=j.events=j.events||{},o=j.handle;if(!o)j.handle=o=function(){return typeof c!=="undefined"&&!c.event.triggered?c.event.handle.apply(o.elem,arguments):w};o.elem=a;b=b.split(" ");for(var k,n=0,r;k=b[n++];){j=e?c.extend({},e):{handler:d,data:f};if(k.indexOf(".")>-1){r=k.split("."); -k=r.shift();j.namespace=r.slice(0).sort().join(".")}else{r=[];j.namespace=""}j.type=k;j.guid=d.guid;var u=i[k],z=c.event.special[k]||{};if(!u){u=i[k]=[];if(!z.setup||z.setup.call(a,f,r,o)===false)if(a.addEventListener)a.addEventListener(k,o,false);else 
a.attachEvent&&a.attachEvent("on"+k,o)}if(z.add){z.add.call(a,j);if(!j.handler.guid)j.handler.guid=d.guid}u.push(j);c.event.global[k]=true}a=null}}},global:{},remove:function(a,b,d,f){if(!(a.nodeType===3||a.nodeType===8)){var e,j=0,i,o,k,n,r,u,z=c.data(a), -C=z&&z.events;if(z&&C){if(b&&b.type){d=b.handler;b=b.type}if(!b||typeof b==="string"&&b.charAt(0)==="."){b=b||"";for(e in C)c.event.remove(a,e+b)}else{for(b=b.split(" ");e=b[j++];){n=e;i=e.indexOf(".")<0;o=[];if(!i){o=e.split(".");e=o.shift();k=new RegExp("(^|\\.)"+c.map(o.slice(0).sort(),db).join("\\.(?:.*\\.)?")+"(\\.|$)")}if(r=C[e])if(d){n=c.event.special[e]||{};for(B=f||0;B=0){a.type= -e=e.slice(0,-1);a.exclusive=true}if(!d){a.stopPropagation();c.event.global[e]&&c.each(c.cache,function(){this.events&&this.events[e]&&c.event.trigger(a,b,this.handle.elem)})}if(!d||d.nodeType===3||d.nodeType===8)return w;a.result=w;a.target=d;b=c.makeArray(b);b.unshift(a)}a.currentTarget=d;(f=c.data(d,"handle"))&&f.apply(d,b);f=d.parentNode||d.ownerDocument;try{if(!(d&&d.nodeName&&c.noData[d.nodeName.toLowerCase()]))if(d["on"+e]&&d["on"+e].apply(d,b)===false)a.result=false}catch(j){}if(!a.isPropagationStopped()&& -f)c.event.trigger(a,b,f,true);else if(!a.isDefaultPrevented()){f=a.target;var i,o=c.nodeName(f,"a")&&e==="click",k=c.event.special[e]||{};if((!k._default||k._default.call(d,a)===false)&&!o&&!(f&&f.nodeName&&c.noData[f.nodeName.toLowerCase()])){try{if(f[e]){if(i=f["on"+e])f["on"+e]=null;c.event.triggered=true;f[e]()}}catch(n){}if(i)f["on"+e]=i;c.event.triggered=false}}},handle:function(a){var b,d,f,e;a=arguments[0]=c.event.fix(a||A.event);a.currentTarget=this;b=a.type.indexOf(".")<0&&!a.exclusive; -if(!b){d=a.type.split(".");a.type=d.shift();f=new RegExp("(^|\\.)"+d.slice(0).sort().join("\\.(?:.*\\.)?")+"(\\.|$)")}e=c.data(this,"events");d=e[a.type];if(e&&d){d=d.slice(0);e=0;for(var j=d.length;e-1?c.map(a.options,function(f){return f.selected}).join("-"):"";else if(a.nodeName.toLowerCase()==="select")d=a.selectedIndex;return d},fa=function(a,b){var d=a.target,f,e;if(!(!da.test(d.nodeName)||d.readOnly)){f=c.data(d,"_change_data");e=Fa(d);if(a.type!=="focusout"||d.type!=="radio")c.data(d,"_change_data", -e);if(!(f===w||e===f))if(f!=null||e){a.type="change";return c.event.trigger(a,b,d)}}};c.event.special.change={filters:{focusout:fa,click:function(a){var b=a.target,d=b.type;if(d==="radio"||d==="checkbox"||b.nodeName.toLowerCase()==="select")return fa.call(this,a)},keydown:function(a){var b=a.target,d=b.type;if(a.keyCode===13&&b.nodeName.toLowerCase()!=="textarea"||a.keyCode===32&&(d==="checkbox"||d==="radio")||d==="select-multiple")return fa.call(this,a)},beforeactivate:function(a){a=a.target;c.data(a, -"_change_data",Fa(a))}},setup:function(){if(this.type==="file")return false;for(var a in ea)c.event.add(this,a+".specialChange",ea[a]);return da.test(this.nodeName)},teardown:function(){c.event.remove(this,".specialChange");return da.test(this.nodeName)}};ea=c.event.special.change.filters}s.addEventListener&&c.each({focus:"focusin",blur:"focusout"},function(a,b){function d(f){f=c.event.fix(f);f.type=b;return c.event.handle.call(this,f)}c.event.special[b]={setup:function(){this.addEventListener(a, -d,true)},teardown:function(){this.removeEventListener(a,d,true)}}});c.each(["bind","one"],function(a,b){c.fn[b]=function(d,f,e){if(typeof d==="object"){for(var j in d)this[b](j,f,d[j],e);return this}if(c.isFunction(f)){e=f;f=w}var i=b==="one"?c.proxy(e,function(k){c(this).unbind(k,i);return 
e.apply(this,arguments)}):e;if(d==="unload"&&b!=="one")this.one(d,f,e);else{j=0;for(var o=this.length;j0){y=t;break}}t=t[g]}m[q]=y}}}var f=/((?:\((?:\([^()]+\)|[^()]+)+\)|\[(?:\[[^[\]]*\]|['"][^'"]*['"]|[^[\]'"]+)+\]|\\.|[^ >+~,(\[\\]+)+|[>+~])(\s*,\s*)?((?:.|\r|\n)*)/g, -e=0,j=Object.prototype.toString,i=false,o=true;[0,0].sort(function(){o=false;return 0});var k=function(g,h,l,m){l=l||[];var q=h=h||s;if(h.nodeType!==1&&h.nodeType!==9)return[];if(!g||typeof g!=="string")return l;for(var p=[],v,t,y,S,H=true,M=x(h),I=g;(f.exec(""),v=f.exec(I))!==null;){I=v[3];p.push(v[1]);if(v[2]){S=v[3];break}}if(p.length>1&&r.exec(g))if(p.length===2&&n.relative[p[0]])t=ga(p[0]+p[1],h);else for(t=n.relative[p[0]]?[h]:k(p.shift(),h);p.length;){g=p.shift();if(n.relative[g])g+=p.shift(); -t=ga(g,t)}else{if(!m&&p.length>1&&h.nodeType===9&&!M&&n.match.ID.test(p[0])&&!n.match.ID.test(p[p.length-1])){v=k.find(p.shift(),h,M);h=v.expr?k.filter(v.expr,v.set)[0]:v.set[0]}if(h){v=m?{expr:p.pop(),set:z(m)}:k.find(p.pop(),p.length===1&&(p[0]==="~"||p[0]==="+")&&h.parentNode?h.parentNode:h,M);t=v.expr?k.filter(v.expr,v.set):v.set;if(p.length>0)y=z(t);else H=false;for(;p.length;){var D=p.pop();v=D;if(n.relative[D])v=p.pop();else D="";if(v==null)v=h;n.relative[D](y,v,M)}}else y=[]}y||(y=t);y||k.error(D|| -g);if(j.call(y)==="[object Array]")if(H)if(h&&h.nodeType===1)for(g=0;y[g]!=null;g++){if(y[g]&&(y[g]===true||y[g].nodeType===1&&E(h,y[g])))l.push(t[g])}else for(g=0;y[g]!=null;g++)y[g]&&y[g].nodeType===1&&l.push(t[g]);else l.push.apply(l,y);else z(y,l);if(S){k(S,q,l,m);k.uniqueSort(l)}return l};k.uniqueSort=function(g){if(B){i=o;g.sort(B);if(i)for(var h=1;h":function(g,h){var l=typeof h==="string";if(l&&!/\W/.test(h)){h=h.toLowerCase();for(var m=0,q=g.length;m=0))l||m.push(v);else if(l)h[p]=false;return false},ID:function(g){return g[1].replace(/\\/g,"")},TAG:function(g){return g[1].toLowerCase()}, -CHILD:function(g){if(g[1]==="nth"){var h=/(-?)(\d*)n((?:\+|-)?\d*)/.exec(g[2]==="even"&&"2n"||g[2]==="odd"&&"2n+1"||!/\D/.test(g[2])&&"0n+"+g[2]||g[2]);g[2]=h[1]+(h[2]||1)-0;g[3]=h[3]-0}g[0]=e++;return g},ATTR:function(g,h,l,m,q,p){h=g[1].replace(/\\/g,"");if(!p&&n.attrMap[h])g[1]=n.attrMap[h];if(g[2]==="~=")g[4]=" "+g[4]+" ";return g},PSEUDO:function(g,h,l,m,q){if(g[1]==="not")if((f.exec(g[3])||"").length>1||/^\w/.test(g[3]))g[3]=k(g[3],null,null,h);else{g=k.filter(g[3],h,l,true^q);l||m.push.apply(m, -g);return false}else if(n.match.POS.test(g[0])||n.match.CHILD.test(g[0]))return true;return g},POS:function(g){g.unshift(true);return g}},filters:{enabled:function(g){return g.disabled===false&&g.type!=="hidden"},disabled:function(g){return g.disabled===true},checked:function(g){return g.checked===true},selected:function(g){return g.selected===true},parent:function(g){return!!g.firstChild},empty:function(g){return!g.firstChild},has:function(g,h,l){return!!k(l[3],g).length},header:function(g){return/h\d/i.test(g.nodeName)}, -text:function(g){return"text"===g.type},radio:function(g){return"radio"===g.type},checkbox:function(g){return"checkbox"===g.type},file:function(g){return"file"===g.type},password:function(g){return"password"===g.type},submit:function(g){return"submit"===g.type},image:function(g){return"image"===g.type},reset:function(g){return"reset"===g.type},button:function(g){return"button"===g.type||g.nodeName.toLowerCase()==="button"},input:function(g){return/input|select|textarea|button/i.test(g.nodeName)}}, -setFilters:{first:function(g,h){return h===0},last:function(g,h,l,m){return 
h===m.length-1},even:function(g,h){return h%2===0},odd:function(g,h){return h%2===1},lt:function(g,h,l){return hl[3]-0},nth:function(g,h,l){return l[3]-0===h},eq:function(g,h,l){return l[3]-0===h}},filter:{PSEUDO:function(g,h,l,m){var q=h[1],p=n.filters[q];if(p)return p(g,l,h,m);else if(q==="contains")return(g.textContent||g.innerText||a([g])||"").indexOf(h[3])>=0;else if(q==="not"){h= -h[3];l=0;for(m=h.length;l=0}},ID:function(g,h){return g.nodeType===1&&g.getAttribute("id")===h},TAG:function(g,h){return h==="*"&&g.nodeType===1||g.nodeName.toLowerCase()===h},CLASS:function(g,h){return(" "+(g.className||g.getAttribute("class"))+" ").indexOf(h)>-1},ATTR:function(g,h){var l=h[1];g=n.attrHandle[l]?n.attrHandle[l](g):g[l]!=null?g[l]:g.getAttribute(l);l=g+"";var m=h[2];h=h[4];return g==null?m==="!=":m=== -"="?l===h:m==="*="?l.indexOf(h)>=0:m==="~="?(" "+l+" ").indexOf(h)>=0:!h?l&&g!==false:m==="!="?l!==h:m==="^="?l.indexOf(h)===0:m==="$="?l.substr(l.length-h.length)===h:m==="|="?l===h||l.substr(0,h.length+1)===h+"-":false},POS:function(g,h,l,m){var q=n.setFilters[h[2]];if(q)return q(g,l,h,m)}}},r=n.match.POS;for(var u in n.match){n.match[u]=new RegExp(n.match[u].source+/(?![^\[]*\])(?![^\(]*\))/.source);n.leftMatch[u]=new RegExp(/(^(?:.|\r|\n)*?)/.source+n.match[u].source.replace(/\\(\d+)/g,function(g, -h){return"\\"+(h-0+1)}))}var z=function(g,h){g=Array.prototype.slice.call(g,0);if(h){h.push.apply(h,g);return h}return g};try{Array.prototype.slice.call(s.documentElement.childNodes,0)}catch(C){z=function(g,h){h=h||[];if(j.call(g)==="[object Array]")Array.prototype.push.apply(h,g);else if(typeof g.length==="number")for(var l=0,m=g.length;l";var l=s.documentElement;l.insertBefore(g,l.firstChild);if(s.getElementById(h)){n.find.ID=function(m,q,p){if(typeof q.getElementById!=="undefined"&&!p)return(q=q.getElementById(m[1]))?q.id===m[1]||typeof q.getAttributeNode!=="undefined"&& -q.getAttributeNode("id").nodeValue===m[1]?[q]:w:[]};n.filter.ID=function(m,q){var p=typeof m.getAttributeNode!=="undefined"&&m.getAttributeNode("id");return m.nodeType===1&&p&&p.nodeValue===q}}l.removeChild(g);l=g=null})();(function(){var g=s.createElement("div");g.appendChild(s.createComment(""));if(g.getElementsByTagName("*").length>0)n.find.TAG=function(h,l){l=l.getElementsByTagName(h[1]);if(h[1]==="*"){h=[];for(var m=0;l[m];m++)l[m].nodeType===1&&h.push(l[m]);l=h}return l};g.innerHTML=""; -if(g.firstChild&&typeof g.firstChild.getAttribute!=="undefined"&&g.firstChild.getAttribute("href")!=="#")n.attrHandle.href=function(h){return h.getAttribute("href",2)};g=null})();s.querySelectorAll&&function(){var g=k,h=s.createElement("div");h.innerHTML="

";if(!(h.querySelectorAll&&h.querySelectorAll(".TEST").length===0)){k=function(m,q,p,v){q=q||s;if(!v&&q.nodeType===9&&!x(q))try{return z(q.querySelectorAll(m),p)}catch(t){}return g(m,q,p,v)};for(var l in g)k[l]=g[l];h=null}}(); -(function(){var g=s.createElement("div");g.innerHTML="
";if(!(!g.getElementsByClassName||g.getElementsByClassName("e").length===0)){g.lastChild.className="e";if(g.getElementsByClassName("e").length!==1){n.order.splice(1,0,"CLASS");n.find.CLASS=function(h,l,m){if(typeof l.getElementsByClassName!=="undefined"&&!m)return l.getElementsByClassName(h[1])};g=null}}})();var E=s.compareDocumentPosition?function(g,h){return!!(g.compareDocumentPosition(h)&16)}: -function(g,h){return g!==h&&(g.contains?g.contains(h):true)},x=function(g){return(g=(g?g.ownerDocument||g:0).documentElement)?g.nodeName!=="HTML":false},ga=function(g,h){var l=[],m="",q;for(h=h.nodeType?[h]:h;q=n.match.PSEUDO.exec(g);){m+=q[0];g=g.replace(n.match.PSEUDO,"")}g=n.relative[g]?g+"*":g;q=0;for(var p=h.length;q=0===d})};c.fn.extend({find:function(a){for(var b=this.pushStack("","find",a),d=0,f=0,e=this.length;f0)for(var j=d;j0},closest:function(a,b){if(c.isArray(a)){var d=[],f=this[0],e,j= -{},i;if(f&&a.length){e=0;for(var o=a.length;e-1:c(f).is(e)){d.push({selector:i,elem:f});delete j[i]}}f=f.parentNode}}return d}var k=c.expr.match.POS.test(a)?c(a,b||this.context):null;return this.map(function(n,r){for(;r&&r.ownerDocument&&r!==b;){if(k?k.index(r)>-1:c(r).is(a))return r;r=r.parentNode}return null})},index:function(a){if(!a||typeof a=== -"string")return c.inArray(this[0],a?c(a):this.parent().children());return c.inArray(a.jquery?a[0]:a,this)},add:function(a,b){a=typeof a==="string"?c(a,b||this.context):c.makeArray(a);b=c.merge(this.get(),a);return this.pushStack(qa(a[0])||qa(b[0])?b:c.unique(b))},andSelf:function(){return this.add(this.prevObject)}});c.each({parent:function(a){return(a=a.parentNode)&&a.nodeType!==11?a:null},parents:function(a){return c.dir(a,"parentNode")},parentsUntil:function(a,b,d){return c.dir(a,"parentNode", -d)},next:function(a){return c.nth(a,2,"nextSibling")},prev:function(a){return c.nth(a,2,"previousSibling")},nextAll:function(a){return c.dir(a,"nextSibling")},prevAll:function(a){return c.dir(a,"previousSibling")},nextUntil:function(a,b,d){return c.dir(a,"nextSibling",d)},prevUntil:function(a,b,d){return c.dir(a,"previousSibling",d)},siblings:function(a){return c.sibling(a.parentNode.firstChild,a)},children:function(a){return c.sibling(a.firstChild)},contents:function(a){return c.nodeName(a,"iframe")? -a.contentDocument||a.contentWindow.document:c.makeArray(a.childNodes)}},function(a,b){c.fn[a]=function(d,f){var e=c.map(this,b,d);eb.test(a)||(f=d);if(f&&typeof f==="string")e=c.filter(f,e);e=this.length>1?c.unique(e):e;if((this.length>1||gb.test(f))&&fb.test(a))e=e.reverse();return this.pushStack(e,a,R.call(arguments).join(","))}});c.extend({filter:function(a,b,d){if(d)a=":not("+a+")";return c.find.matches(a,b)},dir:function(a,b,d){var f=[];for(a=a[b];a&&a.nodeType!==9&&(d===w||a.nodeType!==1||!c(a).is(d));){a.nodeType=== -1&&f.push(a);a=a[b]}return f},nth:function(a,b,d){b=b||1;for(var f=0;a;a=a[d])if(a.nodeType===1&&++f===b)break;return a},sibling:function(a,b){for(var d=[];a;a=a.nextSibling)a.nodeType===1&&a!==b&&d.push(a);return d}});var Ja=/ jQuery\d+="(?:\d+|null)"/g,V=/^\s+/,Ka=/(<([\w:]+)[^>]*?)\/>/g,hb=/^(?:area|br|col|embed|hr|img|input|link|meta|param)$/i,La=/<([\w:]+)/,ib=/"},F={option:[1,""],legend:[1,"
","
"],thead:[1,"","
"],tr:[2,"","
"],td:[3,"","
"],col:[2,"","
"],area:[1,"",""],_default:[0,"",""]};F.optgroup=F.option;F.tbody=F.tfoot=F.colgroup=F.caption=F.thead;F.th=F.td;if(!c.support.htmlSerialize)F._default=[1,"div
","
"];c.fn.extend({text:function(a){if(c.isFunction(a))return this.each(function(b){var d= -c(this);d.text(a.call(this,b,d.text()))});if(typeof a!=="object"&&a!==w)return this.empty().append((this[0]&&this[0].ownerDocument||s).createTextNode(a));return c.text(this)},wrapAll:function(a){if(c.isFunction(a))return this.each(function(d){c(this).wrapAll(a.call(this,d))});if(this[0]){var b=c(a,this[0].ownerDocument).eq(0).clone(true);this[0].parentNode&&b.insertBefore(this[0]);b.map(function(){for(var d=this;d.firstChild&&d.firstChild.nodeType===1;)d=d.firstChild;return d}).append(this)}return this}, -wrapInner:function(a){if(c.isFunction(a))return this.each(function(b){c(this).wrapInner(a.call(this,b))});return this.each(function(){var b=c(this),d=b.contents();d.length?d.wrapAll(a):b.append(a)})},wrap:function(a){return this.each(function(){c(this).wrapAll(a)})},unwrap:function(){return this.parent().each(function(){c.nodeName(this,"body")||c(this).replaceWith(this.childNodes)}).end()},append:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.appendChild(a)})}, -prepend:function(){return this.domManip(arguments,true,function(a){this.nodeType===1&&this.insertBefore(a,this.firstChild)})},before:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b,this)});else if(arguments.length){var a=c(arguments[0]);a.push.apply(a,this.toArray());return this.pushStack(a,"before",arguments)}},after:function(){if(this[0]&&this[0].parentNode)return this.domManip(arguments,false,function(b){this.parentNode.insertBefore(b, -this.nextSibling)});else if(arguments.length){var a=this.pushStack(this,"after",arguments);a.push.apply(a,c(arguments[0]).toArray());return a}},remove:function(a,b){for(var d=0,f;(f=this[d])!=null;d++)if(!a||c.filter(a,[f]).length){if(!b&&f.nodeType===1){c.cleanData(f.getElementsByTagName("*"));c.cleanData([f])}f.parentNode&&f.parentNode.removeChild(f)}return this},empty:function(){for(var a=0,b;(b=this[a])!=null;a++)for(b.nodeType===1&&c.cleanData(b.getElementsByTagName("*"));b.firstChild;)b.removeChild(b.firstChild); -return this},clone:function(a){var b=this.map(function(){if(!c.support.noCloneEvent&&!c.isXMLDoc(this)){var d=this.outerHTML,f=this.ownerDocument;if(!d){d=f.createElement("div");d.appendChild(this.cloneNode(true));d=d.innerHTML}return c.clean([d.replace(Ja,"").replace(/=([^="'>\s]+\/)>/g,'="$1">').replace(V,"")],f)[0]}else return this.cloneNode(true)});if(a===true){ra(this,b);ra(this.find("*"),b.find("*"))}return b},html:function(a){if(a===w)return this[0]&&this[0].nodeType===1?this[0].innerHTML.replace(Ja, -""):null;else if(typeof a==="string"&&!ta.test(a)&&(c.support.leadingWhitespace||!V.test(a))&&!F[(La.exec(a)||["",""])[1].toLowerCase()]){a=a.replace(Ka,Ma);try{for(var b=0,d=this.length;b0||e.cacheable||this.length>1?k.cloneNode(true):k)}o.length&&c.each(o,Qa)}return this}});c.fragments={};c.each({appendTo:"append",prependTo:"prepend",insertBefore:"before",insertAfter:"after",replaceAll:"replaceWith"},function(a,b){c.fn[a]=function(d){var f=[];d=c(d);var e=this.length===1&&this[0].parentNode;if(e&&e.nodeType===11&&e.childNodes.length===1&&d.length===1){d[b](this[0]); -return this}else{e=0;for(var j=d.length;e0?this.clone(true):this).get();c.fn[b].apply(c(d[e]),i);f=f.concat(i)}return this.pushStack(f,a,d.selector)}}});c.extend({clean:function(a,b,d,f){b=b||s;if(typeof b.createElement==="undefined")b=b.ownerDocument||b[0]&&b[0].ownerDocument||s;for(var 
e=[],j=0,i;(i=a[j])!=null;j++){if(typeof i==="number")i+="";if(i){if(typeof i==="string"&&!jb.test(i))i=b.createTextNode(i);else if(typeof i==="string"){i=i.replace(Ka,Ma);var o=(La.exec(i)||["", -""])[1].toLowerCase(),k=F[o]||F._default,n=k[0],r=b.createElement("div");for(r.innerHTML=k[1]+i+k[2];n--;)r=r.lastChild;if(!c.support.tbody){n=ib.test(i);o=o==="table"&&!n?r.firstChild&&r.firstChild.childNodes:k[1]===""&&!n?r.childNodes:[];for(k=o.length-1;k>=0;--k)c.nodeName(o[k],"tbody")&&!o[k].childNodes.length&&o[k].parentNode.removeChild(o[k])}!c.support.leadingWhitespace&&V.test(i)&&r.insertBefore(b.createTextNode(V.exec(i)[0]),r.firstChild);i=r.childNodes}if(i.nodeType)e.push(i);else e= -c.merge(e,i)}}if(d)for(j=0;e[j];j++)if(f&&c.nodeName(e[j],"script")&&(!e[j].type||e[j].type.toLowerCase()==="text/javascript"))f.push(e[j].parentNode?e[j].parentNode.removeChild(e[j]):e[j]);else{e[j].nodeType===1&&e.splice.apply(e,[j+1,0].concat(c.makeArray(e[j].getElementsByTagName("script"))));d.appendChild(e[j])}return e},cleanData:function(a){for(var b,d,f=c.cache,e=c.event.special,j=c.support.deleteExpando,i=0,o;(o=a[i])!=null;i++)if(d=o[c.expando]){b=f[d];if(b.events)for(var k in b.events)e[k]? -c.event.remove(o,k):Ca(o,k,b.handle);if(j)delete o[c.expando];else o.removeAttribute&&o.removeAttribute(c.expando);delete f[d]}}});var kb=/z-?index|font-?weight|opacity|zoom|line-?height/i,Na=/alpha\([^)]*\)/,Oa=/opacity=([^)]*)/,ha=/float/i,ia=/-([a-z])/ig,lb=/([A-Z])/g,mb=/^-?\d+(?:px)?$/i,nb=/^-?\d/,ob={position:"absolute",visibility:"hidden",display:"block"},pb=["Left","Right"],qb=["Top","Bottom"],rb=s.defaultView&&s.defaultView.getComputedStyle,Pa=c.support.cssFloat?"cssFloat":"styleFloat",ja= -function(a,b){return b.toUpperCase()};c.fn.css=function(a,b){return X(this,a,b,true,function(d,f,e){if(e===w)return c.curCSS(d,f);if(typeof e==="number"&&!kb.test(f))e+="px";c.style(d,f,e)})};c.extend({style:function(a,b,d){if(!a||a.nodeType===3||a.nodeType===8)return w;if((b==="width"||b==="height")&&parseFloat(d)<0)d=w;var f=a.style||a,e=d!==w;if(!c.support.opacity&&b==="opacity"){if(e){f.zoom=1;b=parseInt(d,10)+""==="NaN"?"":"alpha(opacity="+d*100+")";a=f.filter||c.curCSS(a,"filter")||"";f.filter= -Na.test(a)?a.replace(Na,b):b}return f.filter&&f.filter.indexOf("opacity=")>=0?parseFloat(Oa.exec(f.filter)[1])/100+"":""}if(ha.test(b))b=Pa;b=b.replace(ia,ja);if(e)f[b]=d;return f[b]},css:function(a,b,d,f){if(b==="width"||b==="height"){var e,j=b==="width"?pb:qb;function i(){e=b==="width"?a.offsetWidth:a.offsetHeight;f!=="border"&&c.each(j,function(){f||(e-=parseFloat(c.curCSS(a,"padding"+this,true))||0);if(f==="margin")e+=parseFloat(c.curCSS(a,"margin"+this,true))||0;else e-=parseFloat(c.curCSS(a, -"border"+this+"Width",true))||0})}a.offsetWidth!==0?i():c.swap(a,ob,i);return Math.max(0,Math.round(e))}return c.curCSS(a,b,d)},curCSS:function(a,b,d){var f,e=a.style;if(!c.support.opacity&&b==="opacity"&&a.currentStyle){f=Oa.test(a.currentStyle.filter||"")?parseFloat(RegExp.$1)/100+"":"";return f===""?"1":f}if(ha.test(b))b=Pa;if(!d&&e&&e[b])f=e[b];else if(rb){if(ha.test(b))b="float";b=b.replace(lb,"-$1").toLowerCase();e=a.ownerDocument.defaultView;if(!e)return null;if(a=e.getComputedStyle(a,null))f= -a.getPropertyValue(b);if(b==="opacity"&&f==="")f="1"}else if(a.currentStyle){d=b.replace(ia,ja);f=a.currentStyle[b]||a.currentStyle[d];if(!mb.test(f)&&nb.test(f)){b=e.left;var 
j=a.runtimeStyle.left;a.runtimeStyle.left=a.currentStyle.left;e.left=d==="fontSize"?"1em":f||0;f=e.pixelLeft+"px";e.left=b;a.runtimeStyle.left=j}}return f},swap:function(a,b,d){var f={};for(var e in b){f[e]=a.style[e];a.style[e]=b[e]}d.call(a);for(e in b)a.style[e]=f[e]}});if(c.expr&&c.expr.filters){c.expr.filters.hidden=function(a){var b= -a.offsetWidth,d=a.offsetHeight,f=a.nodeName.toLowerCase()==="tr";return b===0&&d===0&&!f?true:b>0&&d>0&&!f?false:c.curCSS(a,"display")==="none"};c.expr.filters.visible=function(a){return!c.expr.filters.hidden(a)}}var sb=J(),tb=//gi,ub=/select|textarea/i,vb=/color|date|datetime|email|hidden|month|number|password|range|search|tel|text|time|url|week/i,N=/=\?(&|$)/,ka=/\?/,wb=/(\?|&)_=.*?(&|$)/,xb=/^(\w+:)?\/\/([^\/?#]+)/,yb=/%20/g,zb=c.fn.load;c.fn.extend({load:function(a,b,d){if(typeof a!== -"string")return zb.call(this,a);else if(!this.length)return this;var f=a.indexOf(" ");if(f>=0){var e=a.slice(f,a.length);a=a.slice(0,f)}f="GET";if(b)if(c.isFunction(b)){d=b;b=null}else if(typeof b==="object"){b=c.param(b,c.ajaxSettings.traditional);f="POST"}var j=this;c.ajax({url:a,type:f,dataType:"html",data:b,complete:function(i,o){if(o==="success"||o==="notmodified")j.html(e?c("
<div />
").append(i.responseText.replace(tb,"")).find(e):i.responseText);d&&j.each(d,[i.responseText,o,i])}});return this}, -serialize:function(){return c.param(this.serializeArray())},serializeArray:function(){return this.map(function(){return this.elements?c.makeArray(this.elements):this}).filter(function(){return this.name&&!this.disabled&&(this.checked||ub.test(this.nodeName)||vb.test(this.type))}).map(function(a,b){a=c(this).val();return a==null?null:c.isArray(a)?c.map(a,function(d){return{name:b.name,value:d}}):{name:b.name,value:a}}).get()}});c.each("ajaxStart ajaxStop ajaxComplete ajaxError ajaxSuccess ajaxSend".split(" "), -function(a,b){c.fn[b]=function(d){return this.bind(b,d)}});c.extend({get:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b=null}return c.ajax({type:"GET",url:a,data:b,success:d,dataType:f})},getScript:function(a,b){return c.get(a,null,b,"script")},getJSON:function(a,b,d){return c.get(a,b,d,"json")},post:function(a,b,d,f){if(c.isFunction(b)){f=f||d;d=b;b={}}return c.ajax({type:"POST",url:a,data:b,success:d,dataType:f})},ajaxSetup:function(a){c.extend(c.ajaxSettings,a)},ajaxSettings:{url:location.href, -global:true,type:"GET",contentType:"application/x-www-form-urlencoded",processData:true,async:true,xhr:A.XMLHttpRequest&&(A.location.protocol!=="file:"||!A.ActiveXObject)?function(){return new A.XMLHttpRequest}:function(){try{return new A.ActiveXObject("Microsoft.XMLHTTP")}catch(a){}},accepts:{xml:"application/xml, text/xml",html:"text/html",script:"text/javascript, application/javascript",json:"application/json, text/javascript",text:"text/plain",_default:"*/*"}},lastModified:{},etag:{},ajax:function(a){function b(){e.success&& -e.success.call(k,o,i,x);e.global&&f("ajaxSuccess",[x,e])}function d(){e.complete&&e.complete.call(k,x,i);e.global&&f("ajaxComplete",[x,e]);e.global&&!--c.active&&c.event.trigger("ajaxStop")}function f(q,p){(e.context?c(e.context):c.event).trigger(q,p)}var e=c.extend(true,{},c.ajaxSettings,a),j,i,o,k=a&&a.context||e,n=e.type.toUpperCase();if(e.data&&e.processData&&typeof e.data!=="string")e.data=c.param(e.data,e.traditional);if(e.dataType==="jsonp"){if(n==="GET")N.test(e.url)||(e.url+=(ka.test(e.url)? 
-"&":"?")+(e.jsonp||"callback")+"=?");else if(!e.data||!N.test(e.data))e.data=(e.data?e.data+"&":"")+(e.jsonp||"callback")+"=?";e.dataType="json"}if(e.dataType==="json"&&(e.data&&N.test(e.data)||N.test(e.url))){j=e.jsonpCallback||"jsonp"+sb++;if(e.data)e.data=(e.data+"").replace(N,"="+j+"$1");e.url=e.url.replace(N,"="+j+"$1");e.dataType="script";A[j]=A[j]||function(q){o=q;b();d();A[j]=w;try{delete A[j]}catch(p){}z&&z.removeChild(C)}}if(e.dataType==="script"&&e.cache===null)e.cache=false;if(e.cache=== -false&&n==="GET"){var r=J(),u=e.url.replace(wb,"$1_="+r+"$2");e.url=u+(u===e.url?(ka.test(e.url)?"&":"?")+"_="+r:"")}if(e.data&&n==="GET")e.url+=(ka.test(e.url)?"&":"?")+e.data;e.global&&!c.active++&&c.event.trigger("ajaxStart");r=(r=xb.exec(e.url))&&(r[1]&&r[1]!==location.protocol||r[2]!==location.host);if(e.dataType==="script"&&n==="GET"&&r){var z=s.getElementsByTagName("head")[0]||s.documentElement,C=s.createElement("script");C.src=e.url;if(e.scriptCharset)C.charset=e.scriptCharset;if(!j){var B= -false;C.onload=C.onreadystatechange=function(){if(!B&&(!this.readyState||this.readyState==="loaded"||this.readyState==="complete")){B=true;b();d();C.onload=C.onreadystatechange=null;z&&C.parentNode&&z.removeChild(C)}}}z.insertBefore(C,z.firstChild);return w}var E=false,x=e.xhr();if(x){e.username?x.open(n,e.url,e.async,e.username,e.password):x.open(n,e.url,e.async);try{if(e.data||a&&a.contentType)x.setRequestHeader("Content-Type",e.contentType);if(e.ifModified){c.lastModified[e.url]&&x.setRequestHeader("If-Modified-Since", -c.lastModified[e.url]);c.etag[e.url]&&x.setRequestHeader("If-None-Match",c.etag[e.url])}r||x.setRequestHeader("X-Requested-With","XMLHttpRequest");x.setRequestHeader("Accept",e.dataType&&e.accepts[e.dataType]?e.accepts[e.dataType]+", */*":e.accepts._default)}catch(ga){}if(e.beforeSend&&e.beforeSend.call(k,x,e)===false){e.global&&!--c.active&&c.event.trigger("ajaxStop");x.abort();return false}e.global&&f("ajaxSend",[x,e]);var g=x.onreadystatechange=function(q){if(!x||x.readyState===0||q==="abort"){E|| -d();E=true;if(x)x.onreadystatechange=c.noop}else if(!E&&x&&(x.readyState===4||q==="timeout")){E=true;x.onreadystatechange=c.noop;i=q==="timeout"?"timeout":!c.httpSuccess(x)?"error":e.ifModified&&c.httpNotModified(x,e.url)?"notmodified":"success";var p;if(i==="success")try{o=c.httpData(x,e.dataType,e)}catch(v){i="parsererror";p=v}if(i==="success"||i==="notmodified")j||b();else c.handleError(e,x,i,p);d();q==="timeout"&&x.abort();if(e.async)x=null}};try{var h=x.abort;x.abort=function(){x&&h.call(x); -g("abort")}}catch(l){}e.async&&e.timeout>0&&setTimeout(function(){x&&!E&&g("timeout")},e.timeout);try{x.send(n==="POST"||n==="PUT"||n==="DELETE"?e.data:null)}catch(m){c.handleError(e,x,null,m);d()}e.async||g();return x}},handleError:function(a,b,d,f){if(a.error)a.error.call(a.context||a,b,d,f);if(a.global)(a.context?c(a.context):c.event).trigger("ajaxError",[b,a,f])},active:0,httpSuccess:function(a){try{return!a.status&&location.protocol==="file:"||a.status>=200&&a.status<300||a.status===304||a.status=== -1223||a.status===0}catch(b){}return false},httpNotModified:function(a,b){var d=a.getResponseHeader("Last-Modified"),f=a.getResponseHeader("Etag");if(d)c.lastModified[b]=d;if(f)c.etag[b]=f;return a.status===304||a.status===0},httpData:function(a,b,d){var f=a.getResponseHeader("content-type")||"",e=b==="xml"||!b&&f.indexOf("xml")>=0;a=e?a.responseXML:a.responseText;e&&a.documentElement.nodeName==="parsererror"&&c.error("parsererror");if(d&&d.dataFilter)a=d.dataFilter(a,b);if(typeof 
a==="string")if(b=== -"json"||!b&&f.indexOf("json")>=0)a=c.parseJSON(a);else if(b==="script"||!b&&f.indexOf("javascript")>=0)c.globalEval(a);return a},param:function(a,b){function d(i,o){if(c.isArray(o))c.each(o,function(k,n){b||/\[\]$/.test(i)?f(i,n):d(i+"["+(typeof n==="object"||c.isArray(n)?k:"")+"]",n)});else!b&&o!=null&&typeof o==="object"?c.each(o,function(k,n){d(i+"["+k+"]",n)}):f(i,o)}function f(i,o){o=c.isFunction(o)?o():o;e[e.length]=encodeURIComponent(i)+"="+encodeURIComponent(o)}var e=[];if(b===w)b=c.ajaxSettings.traditional; -if(c.isArray(a)||a.jquery)c.each(a,function(){f(this.name,this.value)});else for(var j in a)d(j,a[j]);return e.join("&").replace(yb,"+")}});var la={},Ab=/toggle|show|hide/,Bb=/^([+-]=)?([\d+-.]+)(.*)$/,W,va=[["height","marginTop","marginBottom","paddingTop","paddingBottom"],["width","marginLeft","marginRight","paddingLeft","paddingRight"],["opacity"]];c.fn.extend({show:function(a,b){if(a||a===0)return this.animate(K("show",3),a,b);else{a=0;for(b=this.length;a").appendTo("body");f=e.css("display");if(f==="none")f="block";e.remove();la[d]=f}c.data(this[a],"olddisplay",f)}}a=0;for(b=this.length;a=0;f--)if(d[f].elem===this){b&&d[f](true);d.splice(f,1)}});b||this.dequeue();return this}});c.each({slideDown:K("show",1),slideUp:K("hide",1),slideToggle:K("toggle",1),fadeIn:{opacity:"show"},fadeOut:{opacity:"hide"}},function(a,b){c.fn[a]=function(d,f){return this.animate(b,d,f)}});c.extend({speed:function(a,b,d){var f=a&&typeof a==="object"?a:{complete:d||!d&&b||c.isFunction(a)&&a,duration:a,easing:d&&b||b&&!c.isFunction(b)&&b};f.duration=c.fx.off?0:typeof f.duration=== -"number"?f.duration:c.fx.speeds[f.duration]||c.fx.speeds._default;f.old=f.complete;f.complete=function(){f.queue!==false&&c(this).dequeue();c.isFunction(f.old)&&f.old.call(this)};return f},easing:{linear:function(a,b,d,f){return d+f*a},swing:function(a,b,d,f){return(-Math.cos(a*Math.PI)/2+0.5)*f+d}},timers:[],fx:function(a,b,d){this.options=b;this.elem=a;this.prop=d;if(!b.orig)b.orig={}}});c.fx.prototype={update:function(){this.options.step&&this.options.step.call(this.elem,this.now,this);(c.fx.step[this.prop]|| -c.fx.step._default)(this);if((this.prop==="height"||this.prop==="width")&&this.elem.style)this.elem.style.display="block"},cur:function(a){if(this.elem[this.prop]!=null&&(!this.elem.style||this.elem.style[this.prop]==null))return this.elem[this.prop];return(a=parseFloat(c.css(this.elem,this.prop,a)))&&a>-10000?a:parseFloat(c.curCSS(this.elem,this.prop))||0},custom:function(a,b,d){function f(j){return e.step(j)}this.startTime=J();this.start=a;this.end=b;this.unit=d||this.unit||"px";this.now=this.start; -this.pos=this.state=0;var e=this;f.elem=this.elem;if(f()&&c.timers.push(f)&&!W)W=setInterval(c.fx.tick,13)},show:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.show=true;this.custom(this.prop==="width"||this.prop==="height"?1:0,this.cur());c(this.elem).show()},hide:function(){this.options.orig[this.prop]=c.style(this.elem,this.prop);this.options.hide=true;this.custom(this.cur(),0)},step:function(a){var b=J(),d=true;if(a||b>=this.options.duration+this.startTime){this.now= -this.end;this.pos=this.state=1;this.update();this.options.curAnim[this.prop]=true;for(var f in 
this.options.curAnim)if(this.options.curAnim[f]!==true)d=false;if(d){if(this.options.display!=null){this.elem.style.overflow=this.options.overflow;a=c.data(this.elem,"olddisplay");this.elem.style.display=a?a:this.options.display;if(c.css(this.elem,"display")==="none")this.elem.style.display="block"}this.options.hide&&c(this.elem).hide();if(this.options.hide||this.options.show)for(var e in this.options.curAnim)c.style(this.elem, -e,this.options.orig[e]);this.options.complete.call(this.elem)}return false}else{e=b-this.startTime;this.state=e/this.options.duration;a=this.options.easing||(c.easing.swing?"swing":"linear");this.pos=c.easing[this.options.specialEasing&&this.options.specialEasing[this.prop]||a](this.state,e,0,1,this.options.duration);this.now=this.start+(this.end-this.start)*this.pos;this.update()}return true}};c.extend(c.fx,{tick:function(){for(var a=c.timers,b=0;b
"; -a.insertBefore(b,a.firstChild);d=b.firstChild;f=d.firstChild;e=d.nextSibling.firstChild.firstChild;this.doesNotAddBorder=f.offsetTop!==5;this.doesAddBorderForTableAndCells=e.offsetTop===5;f.style.position="fixed";f.style.top="20px";this.supportsFixedPosition=f.offsetTop===20||f.offsetTop===15;f.style.position=f.style.top="";d.style.overflow="hidden";d.style.position="relative";this.subtractsBorderForOverflowNotVisible=f.offsetTop===-5;this.doesNotIncludeMarginInBodyOffset=a.offsetTop!==j;a.removeChild(b); -c.offset.initialize=c.noop},bodyOffset:function(a){var b=a.offsetTop,d=a.offsetLeft;c.offset.initialize();if(c.offset.doesNotIncludeMarginInBodyOffset){b+=parseFloat(c.curCSS(a,"marginTop",true))||0;d+=parseFloat(c.curCSS(a,"marginLeft",true))||0}return{top:b,left:d}},setOffset:function(a,b,d){if(/static/.test(c.curCSS(a,"position")))a.style.position="relative";var f=c(a),e=f.offset(),j=parseInt(c.curCSS(a,"top",true),10)||0,i=parseInt(c.curCSS(a,"left",true),10)||0;if(c.isFunction(b))b=b.call(a, -d,e);d={top:b.top-e.top+j,left:b.left-e.left+i};"using"in b?b.using.call(a,d):f.css(d)}};c.fn.extend({position:function(){if(!this[0])return null;var a=this[0],b=this.offsetParent(),d=this.offset(),f=/^body|html$/i.test(b[0].nodeName)?{top:0,left:0}:b.offset();d.top-=parseFloat(c.curCSS(a,"marginTop",true))||0;d.left-=parseFloat(c.curCSS(a,"marginLeft",true))||0;f.top+=parseFloat(c.curCSS(b[0],"borderTopWidth",true))||0;f.left+=parseFloat(c.curCSS(b[0],"borderLeftWidth",true))||0;return{top:d.top- -f.top,left:d.left-f.left}},offsetParent:function(){return this.map(function(){for(var a=this.offsetParent||s.body;a&&!/^body|html$/i.test(a.nodeName)&&c.css(a,"position")==="static";)a=a.offsetParent;return a})}});c.each(["Left","Top"],function(a,b){var d="scroll"+b;c.fn[d]=function(f){var e=this[0],j;if(!e)return null;if(f!==w)return this.each(function(){if(j=wa(this))j.scrollTo(!a?f:c(j).scrollLeft(),a?f:c(j).scrollTop());else this[d]=f});else return(j=wa(e))?"pageXOffset"in j?j[a?"pageYOffset": -"pageXOffset"]:c.support.boxModel&&j.document.documentElement[d]||j.document.body[d]:e[d]}});c.each(["Height","Width"],function(a,b){var d=b.toLowerCase();c.fn["inner"+b]=function(){return this[0]?c.css(this[0],d,false,"padding"):null};c.fn["outer"+b]=function(f){return this[0]?c.css(this[0],d,false,f?"margin":"border"):null};c.fn[d]=function(f){var e=this[0];if(!e)return f==null?null:this;if(c.isFunction(f))return this.each(function(j){var i=c(this);i[d](f.call(this,j,i[d]()))});return"scrollTo"in -e&&e.document?e.document.compatMode==="CSS1Compat"&&e.document.documentElement["client"+b]||e.document.body["client"+b]:e.nodeType===9?Math.max(e.documentElement["client"+b],e.body["scroll"+b],e.documentElement["scroll"+b],e.body["offset"+b],e.documentElement["offset"+b]):f===w?c.css(e,d):this.css(d,typeof f==="string"?f:f+"px")}});A.jQuery=A.$=c})(window); diff --git a/_Old_doc/build/html/_static/minus.png b/_Old_doc/build/html/_static/minus.png deleted file mode 100644 index da1c5620d..000000000 Binary files a/_Old_doc/build/html/_static/minus.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/plus.png b/_Old_doc/build/html/_static/plus.png deleted file mode 100644 index b3cb37425..000000000 Binary files a/_Old_doc/build/html/_static/plus.png and /dev/null differ diff --git a/_Old_doc/build/html/_static/pygments.css b/_Old_doc/build/html/_static/pygments.css deleted file mode 100644 index 1a14f2ae1..000000000 --- a/_Old_doc/build/html/_static/pygments.css +++ /dev/null @@ -1,62 +0,0 @@ 
-.highlight .hll { background-color: #ffffcc } -.highlight { background: #eeffcc; } -.highlight .c { color: #408090; font-style: italic } /* Comment */ -.highlight .err { border: 1px solid #FF0000 } /* Error */ -.highlight .k { color: #007020; font-weight: bold } /* Keyword */ -.highlight .o { color: #666666 } /* Operator */ -.highlight .cm { color: #408090; font-style: italic } /* Comment.Multiline */ -.highlight .cp { color: #007020 } /* Comment.Preproc */ -.highlight .c1 { color: #408090; font-style: italic } /* Comment.Single */ -.highlight .cs { color: #408090; background-color: #fff0f0 } /* Comment.Special */ -.highlight .gd { color: #A00000 } /* Generic.Deleted */ -.highlight .ge { font-style: italic } /* Generic.Emph */ -.highlight .gr { color: #FF0000 } /* Generic.Error */ -.highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ -.highlight .gi { color: #00A000 } /* Generic.Inserted */ -.highlight .go { color: #303030 } /* Generic.Output */ -.highlight .gp { color: #c65d09; font-weight: bold } /* Generic.Prompt */ -.highlight .gs { font-weight: bold } /* Generic.Strong */ -.highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ -.highlight .gt { color: #0040D0 } /* Generic.Traceback */ -.highlight .kc { color: #007020; font-weight: bold } /* Keyword.Constant */ -.highlight .kd { color: #007020; font-weight: bold } /* Keyword.Declaration */ -.highlight .kn { color: #007020; font-weight: bold } /* Keyword.Namespace */ -.highlight .kp { color: #007020 } /* Keyword.Pseudo */ -.highlight .kr { color: #007020; font-weight: bold } /* Keyword.Reserved */ -.highlight .kt { color: #902000 } /* Keyword.Type */ -.highlight .m { color: #208050 } /* Literal.Number */ -.highlight .s { color: #4070a0 } /* Literal.String */ -.highlight .na { color: #4070a0 } /* Name.Attribute */ -.highlight .nb { color: #007020 } /* Name.Builtin */ -.highlight .nc { color: #0e84b5; font-weight: bold } /* Name.Class */ -.highlight .no { color: #60add5 } /* Name.Constant */ -.highlight .nd { color: #555555; font-weight: bold } /* Name.Decorator */ -.highlight .ni { color: #d55537; font-weight: bold } /* Name.Entity */ -.highlight .ne { color: #007020 } /* Name.Exception */ -.highlight .nf { color: #06287e } /* Name.Function */ -.highlight .nl { color: #002070; font-weight: bold } /* Name.Label */ -.highlight .nn { color: #0e84b5; font-weight: bold } /* Name.Namespace */ -.highlight .nt { color: #062873; font-weight: bold } /* Name.Tag */ -.highlight .nv { color: #bb60d5 } /* Name.Variable */ -.highlight .ow { color: #007020; font-weight: bold } /* Operator.Word */ -.highlight .w { color: #bbbbbb } /* Text.Whitespace */ -.highlight .mf { color: #208050 } /* Literal.Number.Float */ -.highlight .mh { color: #208050 } /* Literal.Number.Hex */ -.highlight .mi { color: #208050 } /* Literal.Number.Integer */ -.highlight .mo { color: #208050 } /* Literal.Number.Oct */ -.highlight .sb { color: #4070a0 } /* Literal.String.Backtick */ -.highlight .sc { color: #4070a0 } /* Literal.String.Char */ -.highlight .sd { color: #4070a0; font-style: italic } /* Literal.String.Doc */ -.highlight .s2 { color: #4070a0 } /* Literal.String.Double */ -.highlight .se { color: #4070a0; font-weight: bold } /* Literal.String.Escape */ -.highlight .sh { color: #4070a0 } /* Literal.String.Heredoc */ -.highlight .si { color: #70a0d0; font-style: italic } /* Literal.String.Interpol */ -.highlight .sx { color: #c65d09 } /* Literal.String.Other */ -.highlight .sr { color: #235388 } /* Literal.String.Regex */ 
-.highlight .s1 { color: #4070a0 } /* Literal.String.Single */ -.highlight .ss { color: #517918 } /* Literal.String.Symbol */ -.highlight .bp { color: #007020 } /* Name.Builtin.Pseudo */ -.highlight .vc { color: #bb60d5 } /* Name.Variable.Class */ -.highlight .vg { color: #bb60d5 } /* Name.Variable.Global */ -.highlight .vi { color: #bb60d5 } /* Name.Variable.Instance */ -.highlight .il { color: #208050 } /* Literal.Number.Integer.Long */ \ No newline at end of file diff --git a/_Old_doc/build/html/_static/searchtools.js b/_Old_doc/build/html/_static/searchtools.js deleted file mode 100644 index 663be4c90..000000000 --- a/_Old_doc/build/html/_static/searchtools.js +++ /dev/null @@ -1,560 +0,0 @@ -/* - * searchtools.js_t - * ~~~~~~~~~~~~~~~~ - * - * Sphinx JavaScript utilties for the full-text search. - * - * :copyright: Copyright 2007-2011 by the Sphinx team, see AUTHORS. - * :license: BSD, see LICENSE for details. - * - */ - -/** - * helper function to return a node containing the - * search summary for a given text. keywords is a list - * of stemmed words, hlwords is the list of normal, unstemmed - * words. the first one is used to find the occurance, the - * latter for highlighting it. - */ - -jQuery.makeSearchSummary = function(text, keywords, hlwords) { - var textLower = text.toLowerCase(); - var start = 0; - $.each(keywords, function() { - var i = textLower.indexOf(this.toLowerCase()); - if (i > -1) - start = i; - }); - start = Math.max(start - 120, 0); - var excerpt = ((start > 0) ? '...' : '') + - $.trim(text.substr(start, 240)) + - ((start + 240 - text.length) ? '...' : ''); - var rv = $('
<div class="context"></div>
').text(excerpt); - $.each(hlwords, function() { - rv = rv.highlightText(this, 'highlighted'); - }); - return rv; -} - - -/** - * Porter Stemmer - */ -var Stemmer = function() { - - var step2list = { - ational: 'ate', - tional: 'tion', - enci: 'ence', - anci: 'ance', - izer: 'ize', - bli: 'ble', - alli: 'al', - entli: 'ent', - eli: 'e', - ousli: 'ous', - ization: 'ize', - ation: 'ate', - ator: 'ate', - alism: 'al', - iveness: 'ive', - fulness: 'ful', - ousness: 'ous', - aliti: 'al', - iviti: 'ive', - biliti: 'ble', - logi: 'log' - }; - - var step3list = { - icate: 'ic', - ative: '', - alize: 'al', - iciti: 'ic', - ical: 'ic', - ful: '', - ness: '' - }; - - var c = "[^aeiou]"; // consonant - var v = "[aeiouy]"; // vowel - var C = c + "[^aeiouy]*"; // consonant sequence - var V = v + "[aeiou]*"; // vowel sequence - - var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0 - var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 - var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 - var s_v = "^(" + C + ")?" + v; // vowel in stem - - this.stemWord = function (w) { - var stem; - var suffix; - var firstch; - var origword = w; - - if (w.length < 3) - return w; - - var re; - var re2; - var re3; - var re4; - - firstch = w.substr(0,1); - if (firstch == "y") - w = firstch.toUpperCase() + w.substr(1); - - // Step 1a - re = /^(.+?)(ss|i)es$/; - re2 = /^(.+?)([^s])s$/; - - if (re.test(w)) - w = w.replace(re,"$1$2"); - else if (re2.test(w)) - w = w.replace(re2,"$1$2"); - - // Step 1b - re = /^(.+?)eed$/; - re2 = /^(.+?)(ed|ing)$/; - if (re.test(w)) { - var fp = re.exec(w); - re = new RegExp(mgr0); - if (re.test(fp[1])) { - re = /.$/; - w = w.replace(re,""); - } - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1]; - re2 = new RegExp(s_v); - if (re2.test(stem)) { - w = stem; - re2 = /(at|bl|iz)$/; - re3 = new RegExp("([^aeiouylsz])\\1$"); - re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re2.test(w)) - w = w + "e"; - else if (re3.test(w)) { - re = /.$/; - w = w.replace(re,""); - } - else if (re4.test(w)) - w = w + "e"; - } - } - - // Step 1c - re = /^(.+?)y$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(s_v); - if (re.test(stem)) - w = stem + "i"; - } - - // Step 2 - re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step2list[suffix]; - } - - // Step 3 - re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - suffix = fp[2]; - re = new RegExp(mgr0); - if (re.test(stem)) - w = stem + step3list[suffix]; - } - - // Step 4 - re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; - re2 = /^(.+?)(s|t)(ion)$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - if (re.test(stem)) - w = stem; - } - else if (re2.test(w)) { - var fp = re2.exec(w); - stem = fp[1] + fp[2]; - re2 = new RegExp(mgr1); - if (re2.test(stem)) - w = stem; - } - - // Step 5 - re = /^(.+?)e$/; - if (re.test(w)) { - var fp = re.exec(w); - stem = fp[1]; - re = new RegExp(mgr1); - re2 = new RegExp(meq1); - re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); - if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) - w = stem; - } - re = /ll$/; - re2 = new RegExp(mgr1); - if (re.test(w) && re2.test(w)) { - 
re = /.$/; - w = w.replace(re,""); - } - - // and turn initial Y back to y - if (firstch == "y") - w = firstch.toLowerCase() + w.substr(1); - return w; - } -} - - -/** - * Search Module - */ -var Search = { - - _index : null, - _queued_query : null, - _pulse_status : -1, - - init : function() { - var params = $.getQueryParameters(); - if (params.q) { - var query = params.q[0]; - $('input[name="q"]')[0].value = query; - this.performSearch(query); - } - }, - - loadIndex : function(url) { - $.ajax({type: "GET", url: url, data: null, success: null, - dataType: "script", cache: true}); - }, - - setIndex : function(index) { - var q; - this._index = index; - if ((q = this._queued_query) !== null) { - this._queued_query = null; - Search.query(q); - } - }, - - hasIndex : function() { - return this._index !== null; - }, - - deferQuery : function(query) { - this._queued_query = query; - }, - - stopPulse : function() { - this._pulse_status = 0; - }, - - startPulse : function() { - if (this._pulse_status >= 0) - return; - function pulse() { - Search._pulse_status = (Search._pulse_status + 1) % 4; - var dotString = ''; - for (var i = 0; i < Search._pulse_status; i++) - dotString += '.'; - Search.dots.text(dotString); - if (Search._pulse_status > -1) - window.setTimeout(pulse, 500); - }; - pulse(); - }, - - /** - * perform a search for something - */ - performSearch : function(query) { - // create the required interface elements - this.out = $('#search-results'); - this.title = $('

<h2>' + _('Searching') + '</h2>').appendTo(this.out); - this.dots = $('<span></span>').appendTo(this.title); - this.status = $('<p style="display: none"></p>').appendTo(this.out); - this.output = $('