Version: 8.3.0
Methods dedicated to file export

Functions

void PMMLlib::PMMLlib::fillVectorsForExport (int nInput, int nOutput, int nHidden, int normType, std::vector< double > &minInput, std::vector< double > &maxInput, std::vector< double > &minOutput, std::vector< double > &maxOutput, std::vector< double > &valW)
 Specific to NeuralNetwork. More...
 
PMMLLIB_EXPORT void PMMLlib::PMMLlib::ExportCpp (std::string file, std::string functionName, std::string header)
 
PMMLLIB_EXPORT void PMMLlib::PMMLlib::ExportFortran (std::string file, std::string functionName, std::string header)
 
PMMLLIB_EXPORT void PMMLlib::PMMLlib::ExportPython (std::string file, std::string functionName, std::string header)
 
PMMLLIB_EXPORT std::string PMMLlib::PMMLlib::ExportPyStr (std::string functionName, std::string header)
 
void PMMLlib::PMMLlib::ExportNeuralNetworkCpp (std::string file, std::string functionName, std::string header)
 Specific to NeuralNetwork. More...
 
void PMMLlib::PMMLlib::ExportNeuralNetworkFortran (std::string file, std::string functionName, std::string header)
 Specific to NeuralNetwork. More...
 
void PMMLlib::PMMLlib::ExportNeuralNetworkPython (std::string file, std::string functionName, std::string header)
 Specific to NeuralNetwork. More...
 
std::string PMMLlib::PMMLlib::ExportNeuralNetworkPyStr (std::string functionName, std::string header)
 Specific to NeuralNetwork. More...
 
void PMMLlib::PMMLlib::ExportLinearRegressionCpp (std::string, std::string, std::string)
 Specific to RegressionModel. More...
 
void PMMLlib::PMMLlib::ExportLinearRegressionFortran (std::string, std::string, std::string)
 Specific to Regression. More...
 
void PMMLlib::PMMLlib::ExportLinearRegressionPython (std::string, std::string, std::string)
 Specific to Regression. More...
 
std::string PMMLlib::PMMLlib::ExportLinearRegressionPyStr (std::string functionName, std::string header)
 Specific to Regression. More...
 

Detailed Description

Methods dedicated to file export

Function Documentation

void PMMLlib::PMMLlib::ExportCpp ( std::string  file,
std::string  functionName,
std::string  header 
)

Export the current model as a function in a Cpp file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 597 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::_currentModelType, PMMLlib::PMMLlib::ExportLinearRegressionCpp(), PMMLlib::PMMLlib::ExportNeuralNetworkCpp(), PMMLlib::kANN, and PMMLlib::kLR.

600 {
601  if ( _currentModelType == kANN )
602  ExportNeuralNetworkCpp(file,functionName, header);
603  else if ( _currentModelType == kLR )
604  {
605  ExportLinearRegressionCpp(file, functionName, header);
606  }
607  else
608  throw string("ExportCpp : PMML type not handled.");
609 }
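
A minimal usage sketch. Only the ExportCpp call is documented in this section; the header name, the file-loading constructor and the SetCurrentModel call are assumptions about the rest of the PMMLlib API:

#include "PMMLlib.hxx"                                   // assumed header name

int main()
{
    PMMLlib::PMMLlib pmml("model.pmml");                 // assumption: constructor that loads a PMML file
    pmml.SetCurrentModel("ANNmodel", PMMLlib::kANN);     // assumption: selects the model to export
    // Documented above: write the current model as a C++ function named myModel.
    pmml.ExportCpp("model_export.cxx", "myModel", "Generated by PMMLlib");
    return 0;
}
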
void PMMLlib::PMMLlib::ExportFortran ( std::string  file,
std::string  functionName,
std::string  header 
)

Export the current model as a function in a Fortran file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 617 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::_currentModelType, PMMLlib::PMMLlib::ExportLinearRegressionFortran(), PMMLlib::PMMLlib::ExportNeuralNetworkFortran(), PMMLlib::kANN, and PMMLlib::kLR.

620 {
621  if ( _currentModelType == kANN )
622  ExportNeuralNetworkFortran(file,functionName, header);
623  else if ( _currentModelType == kLR )
624  ExportLinearRegressionFortran(file,functionName, header);
625  else
626  throw string("ExportFortran : PMML type not handled.");
627 }
void PMMLlib::PMMLlib::ExportLinearRegressionCpp ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to RegressionModel.

Export the current model as a linear regression function in a Cpp file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 2641 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckRegression(), PMMLlib::PMMLlib::GetNumericPredictorCoefficient(), PMMLlib::PMMLlib::GetNumericPredictorName(), PMMLlib::PMMLlib::GetNumericPredictorNb(), PMMLlib::PMMLlib::GetPredictorTermCoefficient(), PMMLlib::PMMLlib::GetPredictorTermName(), PMMLlib::PMMLlib::GetPredictorTermNb(), PMMLlib::PMMLlib::GetRegressionTableIntercept(), PMMLlib::PMMLlib::HasIntercept(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportCpp().

2644 {
2645  CheckRegression();
2646 
2647  // Write the file
2648  ofstream exportfile(file.c_str());
2649 
2650  exportfile << "void " << functionName <<"(double *param, double *res)" << endl;
2651  exportfile << "{" << endl;
2652  // header
2653  exportfile << " ////////////////////////////// " << endl;
2654  exportfile << " //" << endl;
2655  // insert comments in header
2656  header = " // " + header;
2657  size_t pos = 0;
2658  while ((pos = header.find("\n", pos)) != std::string::npos)
2659  {
2660  header.replace(pos, 1, "\n //");
2661  pos += 5;
2662  }
2663  exportfile << header << endl;
2664  exportfile << " //" << endl;
2665  exportfile << " ////////////////////////////// " << endl << endl;
2666 
2667  double intercept = 0.0;
2668  if ( HasIntercept() )
2669  {
2670  exportfile << " // Intercept"<< endl;
2671  intercept = GetRegressionTableIntercept();
2672  }
2673  else
2674  exportfile << " // No Intercept"<< endl;
2675  exportfile << " double y = " << intercept << ";";
2676  exportfile << endl << endl;
2677 
2678  int nPred = GetNumericPredictorNb();
2679  for (int i=0; i<nPred; i++)
2680  {
2681  exportfile << " // Attribute : " << GetNumericPredictorName(i) << endl;
2682  exportfile << " y += param["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
2683  exportfile << endl << endl;
2684  }
2685  nPred = GetPredictorTermNb();
2686  for (int i=0; i<nPred; i++)
2687  {
2688  exportfile << " // Attribute : " << GetPredictorTermName(i) << endl;
2689  exportfile << " y += param["<<(i+nPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
2690  exportfile << endl << endl;
2691  }
2692 
2693  exportfile << " // Return the value"<< endl;
2694  exportfile << " res[0] = y;" << endl;
2695  exportfile << "}" << endl;
2696  exportfile.close();
2697 }
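
To make the emitted format concrete, the generated file for a regression table with an intercept and two numeric predictors has roughly the following shape (function name, attribute names and all numeric values are placeholders):

void myModel(double *param, double *res)
{
    //////////////////////////////
    //
    // user-supplied header, prefixed with "//" on every line
    //
    //////////////////////////////

    // Intercept
    double y = 1.5;            // placeholder value

    // Attribute : x0
    y += param[0]*0.25;        // placeholder coefficient

    // Attribute : x1
    y += param[1]*0.75;        // placeholder coefficient

    // Return the value
    res[0] = y;
}
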
void PMMLlib::PMMLlib::ExportLinearRegressionFortran ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to Regression.

Export the current model as a linear regression function in a Fortran file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 2706 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckRegression(), PMMLlib::PMMLlib::GetNumericPredictorCoefficient(), PMMLlib::PMMLlib::GetNumericPredictorName(), PMMLlib::PMMLlib::GetNumericPredictorNb(), PMMLlib::PMMLlib::GetPredictorTermCoefficient(), PMMLlib::PMMLlib::GetPredictorTermName(), PMMLlib::PMMLlib::GetPredictorTermNb(), PMMLlib::PMMLlib::GetRegressionTableIntercept(), PMMLlib::PMMLlib::HasIntercept(), CORBAEngineTest::i, and PMMLlib::NumberToString().

Referenced by PMMLlib::PMMLlib::ExportFortran().

2709 {
2710  CheckRegression();
2711 
2712  int nNumPred = GetNumericPredictorNb();
2713  int nPredTerm = GetPredictorTermNb();
2714  vector<string>strParam(nNumPred+nPredTerm);
2715  for(int i=0; i<(nNumPred+nPredTerm); i++)
2716  {
2717  strParam[i] = "P" + NumberToString(i) ;
2718  }
2719 
2720  // Write the file
2721  ofstream exportfile(file.c_str());
2722 
2723  exportfile << " SUBROUTINE " << functionName <<"(";
2724  for(int i=0; i<(nNumPred+nPredTerm); i++)
2725  {
2726  exportfile << strParam[i] << ", ";
2727  }
2728  exportfile << "RES)" << endl;
2729 
2730  // header
2731  exportfile << "C --- *********************************************" << endl;
2732  exportfile << "C --- " << endl;
2733  // insert comments in header
2734  header = "C --- " + header;
2735  size_t pos = 0;
2736  while ((pos = header.find("\n", pos)) != std::string::npos)
2737  {
2738  header.replace(pos, 1, "\nC --- ");
2739  pos += 5;
2740  }
2741  exportfile << header << endl;
2742  exportfile << "C --- " << endl;
2743  exportfile << "C --- *********************************************" << endl << endl;
2744 
2745  exportfile << " IMPLICIT DOUBLE PRECISION (P)" << endl;
2746  exportfile << " DOUBLE PRECISION RES" << endl;
2747  exportfile << " DOUBLE PRECISION Y" << endl;
2748  exportfile << endl;
2749 
2750  double intercept = 0.0;
2751  if ( HasIntercept() )
2752  {
2753  exportfile << "C --- Intercept"<< endl;
2754  intercept = GetRegressionTableIntercept();
2755  }
2756  else
2757  exportfile << "C --- No Intercept"<< endl;
2758  exportfile << " Y = " << intercept << ";";
2759  exportfile << endl << endl;
2760 
2761  for (int i=0; i<nNumPred; i++)
2762  {
2763  exportfile << "C --- Attribute : " << GetNumericPredictorName(i) << endl;
2764  exportfile << " Y += P["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
2765  exportfile << endl << endl;
2766  }
2767 
2768  for (int i=0; i<nPredTerm; i++)
2769  {
2770  exportfile << "C --- Attribute : " << GetPredictorTermName(i) << endl;
2771  exportfile << " Y += P["<<(i+nNumPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
2772  exportfile << endl << endl;
2773  }
2774 
2775  exportfile << "C --- Return the value"<< endl;
2776  exportfile << " RES = Y " << endl;
2777  exportfile << " RETURN" << endl;
2778  exportfile << " END" << endl;
2779  exportfile.close();
2780 }
std::string PMMLlib::PMMLlib::ExportLinearRegressionPyStr ( std::string  functionName,
std::string  header 
)
private

Specific to Regression.

Export the current model as a linear regression function in a Python string.

Parameters
functionName   Name of the function
header         Header of the function

Definition at line 2806 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckRegression(), PMMLlib::PMMLlib::GetNumericPredictorCoefficient(), PMMLlib::PMMLlib::GetNumericPredictorName(), PMMLlib::PMMLlib::GetNumericPredictorNb(), PMMLlib::PMMLlib::GetPredictorTermCoefficient(), PMMLlib::PMMLlib::GetPredictorTermName(), PMMLlib::PMMLlib::GetPredictorTermNb(), PMMLlib::PMMLlib::GetRegressionTableIntercept(), PMMLlib::PMMLlib::HasIntercept(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportLinearRegressionPython(), and PMMLlib::PMMLlib::ExportPyStr().

2808 {
2809  CheckRegression();
2810 
2811  ostringstream out;
2812 
2813  // Shebang and imports
2814  out << "#!/usr/bin/env python" << endl;
2815  out << "# -*- coding: utf-8 -*-" << endl;
2816  out << endl;
2817 
2818  // Function
2819  out << "def " << functionName <<"(param):" << endl;
2820  out << endl;
2821 
2822  // header
2823  out << " ############################## " << endl;
2824  out << " # " << endl;
2825  // insert comments in header
2826  header = " # " + header;
2827  size_t pos = 0;
2828  while ((pos = header.find("\n", pos)) != std::string::npos)
2829  {
2830  header.replace(pos, 1, "\n #");
2831  pos += 5;
2832  }
2833  out << header << endl;
2834  out << " # " << endl;
2835  out << " ############################## " << endl << endl;
2836 
2837  double intercept = 0.0;
2838  if ( HasIntercept() )
2839  {
2840  out << " # Intercept"<< endl;
2841  intercept = GetRegressionTableIntercept();
2842  }
2843  else
2844  out << " # No Intercept"<< endl;
2845  out << " y = " << intercept << ";";
2846  out << endl << endl;
2847 
2848  int nPred = GetNumericPredictorNb();
2849  for (int i=0; i<nPred; i++)
2850  {
2851  out << " # Attribute : " << GetNumericPredictorName(i) << endl;
2852  out << " y += param["<<i<<"]*" << GetNumericPredictorCoefficient(i) << ";";
2853  out << endl << endl;
2854  }
2855  nPred = GetPredictorTermNb();
2856  for (int i=0; i<nPred; i++)
2857  {
2858  out << " # Attribute : " << GetPredictorTermName(i) << endl;
2859  out << " y += param["<<(i+nPred)<<"]*" << GetPredictorTermCoefficient(i) << ";";
2860  out << endl << endl;
2861  }
2862 
2863  out << " # Return the value"<< endl;
2864  out << " return [y];" << endl;
2865 
2866  return out.str() ;
2867 }
void PMMLlib::PMMLlib::ExportLinearRegressionPython ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to Regression.

Export the current model as a linear regression function in a Python file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 2789 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::ExportLinearRegressionPyStr().

Referenced by PMMLlib::PMMLlib::ExportPython().

2792 {
2793  string str(ExportLinearRegressionPyStr(functionName, header));
2794  // Write the file
2795  ofstream exportfile(file.c_str());
2796  exportfile << str;
2797  exportfile.close();
2798 }
void PMMLlib::PMMLlib::ExportNeuralNetworkCpp ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to NeuralNetwork.

Export the current model as a NeuralNetwork function in a Cpp file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 1696 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckNeuralNetwork(), PMMLlib::PMMLlib::fillVectorsForExport(), PMMLlib::PMMLlib::GetNbInputs(), PMMLlib::PMMLlib::GetNbNeuronsAtLayer(), PMMLlib::PMMLlib::GetNbOutputs(), PMMLlib::PMMLlib::GetNormalizationType(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportCpp().

1699 {
1700  CheckNeuralNetwork();
1701 
1702  // Get the different values required
1703  int nInput = GetNbInputs();
1704  int nOutput = GetNbOutputs();
1705  int nHidden = GetNbNeuronsAtLayer(0);
1706  int nNeurons = nInput+nOutput+nHidden;
1707  int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
1708  int normType = GetNormalizationType();
1709  // Build min/max input/output vectors
1710  vector<double> minInput(nInput);
1711  vector<double> maxInput(nInput);
1712  vector<double> minOutput(nOutput);
1713  vector<double> maxOutput(nOutput);
1714  vector<double> valW(nWeights);
1715  fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
1716  // Write the file
1717  ofstream sourcefile(file.c_str());
1718  // ActivationFunction
1719  if( normType==0 )
1720  { // kMinusOneOne
1721  sourcefile << "#define ActivationFunction(sum) ( tanh(sum) )" << endl;
1722  }
1723  else
1724  { // kCR, kZeroOne
1725  sourcefile << "#define ActivationFunction(sum) ( 1.0 / ( 1.0 + exp( -1.0 * sum )) )" << endl;
1726  }
1727  //
1728  sourcefile << "void " << functionName <<"(double *param, double *res)" << endl;
1729  sourcefile << "{" << endl;
1730  // header
1731  sourcefile << " ////////////////////////////// " << endl;
1732  sourcefile << " //" << endl;
1733  // insert comments in header
1734  header = " // " + header;
1735  size_t pos = 0;
1736  while ((pos = header.find("\n", pos)) != std::string::npos)
1737  {
1738  header.replace(pos, 1, "\n //");
1739  pos += 5;
1740  }
1741  sourcefile << header << endl;
1742  sourcefile << " //" << endl;
1743  sourcefile << " ////////////////////////////// " << endl;
1744  sourcefile << endl;
1745  sourcefile << " int nInput = " << nInput << ";" << endl;
1746  sourcefile << " int nOutput = " << nOutput << ";" << endl;
1747  // sourcefile << " int nWeights = " << _nWeight << ";" << endl;
1748  sourcefile << " int nHidden = " << nHidden << ";" << endl;
1749  sourcefile << " const int nNeurones = " << nNeurons << ";" << endl;
1750  sourcefile << " double " << functionName << "_act[nNeurones];" << endl;
1751  sourcefile << endl;
1752  sourcefile << " // --- Preprocessing of the inputs and outputs" << endl;
1753  sourcefile << " double " << functionName << "_minInput[] = {" << endl << " ";
1754  for(int i=0 ; i<nInput ; i++)
1755  {
1756  sourcefile << minInput[i] << ", ";
1757  if( (i+1)%5==0 )
1758  sourcefile << "\n ";
1759  }
1760  if( nInput%5 != 0 )
1761  sourcefile << endl;
1762  sourcefile << " };" << endl;
1763  //
1764  sourcefile << " double " << functionName << "_minOutput[] = {" << endl << " ";
1765  sourcefile << minOutput[0] << ", ";
1766  sourcefile << " };" << endl;
1767  //
1768  sourcefile << " double " << functionName << "_maxInput[] = {" << endl << " ";
1769  for(int i=0 ; i<nInput ; i++)
1770  {
1771  sourcefile << maxInput[i] << ", ";
1772  if( (i+1)%5==0 )
1773  sourcefile << "\n ";
1774  }
1775  if( nInput%5 != 0 )
1776  sourcefile << endl;
1777  sourcefile << " };" << endl;
1778  //
1779  sourcefile << " double " << functionName << "_maxOutput[] = {" << endl << " ";
1780  sourcefile << maxOutput[0] << ", ";
1781  sourcefile << " };" << endl;
1782  // Weights vector
1783  sourcefile << endl;
1784  sourcefile << " // --- Values of the weights" << endl;
1785  sourcefile << " double " << functionName << "_valW[] = {" << endl << " ";
1786  for(int i=0 ; i<nWeights ; i++)
1787  {
1788  sourcefile << valW[i] << ", ";
1789  if ( (i+1)%5 == 0 )
1790  sourcefile << endl << " ";
1791  }
1792  sourcefile << endl << " };"<<endl;
1793  //
1794  sourcefile << " // --- Constants";
1795  sourcefile << endl;
1796  sourcefile << " int indNeurone = 0;"<<endl;
1797  sourcefile << " int CrtW;"<<endl;
1798  sourcefile << " double sum;"<<endl;
1799 
1800  // input layer
1801  sourcefile << endl;
1802  sourcefile << " // --- Input Layers"<<endl;
1803  sourcefile << " for(int i = 0; i < nInput; i++) {"<<endl;
1804  if( normType==0 )
1805  { // kMinusOneOne
1806  sourcefile << " " << functionName << "_act[indNeurone++] = 2.0 * ( param[i] - "
1807  << functionName << "_minInput[i] ) / ( " << functionName << "_maxInput[i] - "
1808  << functionName << "_minInput[i] ) - 1.0;"<<endl;
1809  }
1810  else
1811  { // kCR, kZeroOne
1812  sourcefile << " " << functionName << "_act[indNeurone++] = ( param[i] - "
1813  << functionName << "_minInput[i] ) / " << functionName << "_maxInput[i];"
1814  << endl;
1815  }
1816  sourcefile << " }"<<endl;
1817 
1818 
1819  // hidden layer
1820  sourcefile << endl;
1821  sourcefile << " // --- Hidden Layers"<<endl;
1822  sourcefile << " for (int member = 0; member < nHidden; member++) {"<<endl;
1823  sourcefile << " int CrtW = member * ( nInput + 2) + 2;" << endl;
1824  sourcefile << " sum = " << functionName << "_valW[CrtW++];" << endl;
1825  sourcefile << " for (int source = 0; source < nInput; source++) {" << endl;
1826  sourcefile << " sum += " << functionName << "_act[source] * " << functionName << "_valW[CrtW++];" << endl;
1827  sourcefile << " }" << endl;
1828  sourcefile << " " << functionName << "_act[indNeurone++] = ActivationFunction(sum);" << endl;
1829  sourcefile << " }"<<endl;
1830  // output layer
1831  sourcefile << endl;
1832  sourcefile << " // --- Output"<<endl;
1833  sourcefile << " for (int member = 0; member < nOutput; member++) {"<<endl;
1834  sourcefile << " sum = " << functionName << "_valW[0];"<<endl;
1835  sourcefile << " for (int source = 0; source < nHidden; source++) {"<<endl;
1836  sourcefile << " CrtW = source * ( nInput + 2) + 1;"<<endl;
1837  sourcefile << " sum += " << functionName << "_act[nInput+source] * " << functionName << "_valW[CrtW];"<<endl;
1838  sourcefile << " }"<<endl;
1839  sourcefile << " " << functionName << "_act[indNeurone++] = sum;"<<endl;
1840  if( normType==0 )
1841  { // kMinusOneOne
1842  sourcefile << " res[member] = " << functionName
1843  << "_minOutput[member] + 0.5 * ( " << functionName
1844  << "_maxOutput[member] - " << functionName
1845  << "_minOutput[member] ) * ( sum + 1.0);" << endl;
1846  }
1847  else
1848  { // kCR, kZeroOne
1849  sourcefile << " res[member] = " << functionName
1850  << "_minOutput[member] + " << functionName
1851  << "_maxOutput[member] * sum;" << endl;
1852  }
1853  sourcefile << " }"<<endl;
1854  //
1855  sourcefile << "}" << endl;
1856  sourcefile.close();
1857 }
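
The generated file exposes a plain void functionName(double *param, double *res) entry point, so calling it from client code only requires matching the input and output counts of the network; a minimal caller sketch (the function name and the array sizes, which must correspond to GetNbInputs() and GetNbOutputs(), are placeholders):

#include <cstdio>

// Prototype of the function emitted by ExportNeuralNetworkCpp; "myModel" is
// whatever was passed as functionName (placeholder here).
void myModel(double *param, double *res);

int main()
{
    double param[4] = {0.1, 0.2, 0.3, 0.4};  // placeholder: one value per network input
    double res[1]   = {0.0};                 // placeholder: one slot per network output
    myModel(param, res);
    std::printf("output = %g\n", res[0]);
    return 0;
}

Note that the generated source uses tanh and exp through the ActivationFunction macro but does not emit an #include, so a math header has to be added when compiling it.
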
void PMMLlib::PMMLlib::ExportNeuralNetworkFortran ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to NeuralNetwork.

Export the current model as a NeuralNetwork function in a Fortran file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 1866 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckNeuralNetwork(), PMMLlib::PMMLlib::fillVectorsForExport(), PMMLlib::PMMLlib::GetNameInput(), PMMLlib::PMMLlib::GetNameOutput(), PMMLlib::PMMLlib::GetNbInputs(), PMMLlib::PMMLlib::GetNbNeuronsAtLayer(), PMMLlib::PMMLlib::GetNbOutputs(), PMMLlib::PMMLlib::GetNormalizationType(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportFortran().

1869 {
1870  CheckNeuralNetwork();
1871 
1872  // Get the different values required
1873  int nInput = GetNbInputs();
1874  int nOutput = GetNbOutputs();
1875  int nHidden = GetNbNeuronsAtLayer(0);
1876  int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
1877  int normType = GetNormalizationType();
1878  // Build min/max input/output vectors
1879  vector<double> minInput(nInput);
1880  vector<double> maxInput(nInput);
1881  vector<double> minOutput(nOutput);
1882  vector<double> maxOutput(nOutput);
1883  vector<double> valW(nWeights);
1884  fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
1885  // Write the file
1886  ofstream sourcefile(file.c_str());
1887 
1888  sourcefile << " SUBROUTINE " << functionName << "(";
1889  for(int i=0 ; i<GetNbInputs() ; i++)
1890  {
1891  sourcefile << GetNameInput(i) << ",";
1892  }
1893  sourcefile << GetNameOutput(0) << ")" << endl;
1894  // header
1895  sourcefile << "C --- *********************************************" << endl;
1896  sourcefile << "C --- " << endl;
1897  // insert comments in header
1898  header = "C --- " + header;
1899  size_t pos = 0;
1900  while ((pos = header.find("\n", pos)) != std::string::npos)
1901  {
1902  header.replace(pos, 1, "\nC --- ");
1903  pos += 5;
1904  }
1905  sourcefile << header << endl;
1906  sourcefile << "C --- " << endl;
1907  sourcefile << "C --- *********************************************" << endl;
1908 
1909  sourcefile << " IMPLICIT DOUBLE PRECISION (V)" << endl;
1910  for(int i=0 ; i<GetNbInputs() ; i++)
1911  {
1912  sourcefile << " DOUBLE PRECISION " << GetNameInput(i) << endl;
1913  }
1914  sourcefile << " DOUBLE PRECISION " << GetNameOutput(0) << endl;
1915  sourcefile << endl;
1916 
1917  sourcefile << "C --- Preprocessing of the inputs" << endl;
1918  for(int i=0 ; i<GetNbInputs() ; i++)
1919  {
1920  sourcefile << " VXN" << GetNameInput(i) << " = ";
1921 
1922  if( normType==0 )
1923  { // kMinusOneOne
1924  sourcefile << "2.D0 * ( " << GetNameInput(i) << " - " << minInput[i] << "D0 ) / " << maxInput[i] - minInput[i] << "D0 - 1.0" << endl;
1925  }
1926  else
1927  { // kCR, kZeroOne
1928  sourcefile << "( " << GetNameInput(i) << " - " << minInput[i] << "D0 ) / " << maxInput[i] << "D0" << endl;
1929  }
1930  }
1931 
1932  // Weights vector
1933  sourcefile << endl;
1934  sourcefile << "C --- Values of the weights" << endl;
1935  for(int i=0 ; i<nWeights ; i++)
1936  {
1937  sourcefile << " VW" << i+1 << " = " << valW[i] << endl;
1938  }
1939  // Loop on hidden neurons
1940  sourcefile << endl;
1941  for(int member = 0; member < nHidden; member++)
1942  {
1943  sourcefile << "C --- hidden neural number " << member+1 << endl;
1944  int CrtW = member * ( nInput + 2) + 3;
1945  sourcefile << " VAct" << member+1 << " = VW" << CrtW++ << endl;
1946  for (int source = 0; source < nInput; source++)
1947  {
1948  sourcefile << " 1 + VW"<< CrtW++ << " * VXN" << GetNameInput(source) << endl;
1949  }
1950  sourcefile << endl;
1951 
1952 
1953  if( normType==0 )
1954  { // kMinusOneOne
1955  sourcefile << " VPot" << member+1 << " = 2.D0 / (1.D0 + DEXP(-2.D0 * VAct" << member+1 <<")) - 1.D0" << endl;
1956  }
1957  else
1958  { // kCR, kZeroOne
1959  sourcefile << " VPot" << member+1 << " = 1.D0 / (1.D0 + DEXP(-1.D0 * VAct" << member+1 <<"))" << endl;
1960  }
1961  sourcefile << endl;
1962  }
1963 
1964  // Output of the model
1965  sourcefile << "C --- Output" << endl;
1966  sourcefile << " VOut = VW1" << endl;
1967  for(int source=0 ; source < nHidden ; source++)
1968  {
1969  int CrtW = source * ( nInput + 2) + 2;
1970  sourcefile << " 1 + VW"<< CrtW << " * VPot" << source+1 << endl;
1971  }
1972 
1973  // Denormalize Output
1974  sourcefile << endl;
1975  sourcefile << "C --- Pretraitment of the output" << endl;
1976  if( normType==0 )
1977  { // kMinusOneOne
1978  sourcefile << " VDelta = " << 0.5*(maxOutput[0]-minOutput[0]) << "D0 * ( VOut + 1.0D0)" << endl;
1979  sourcefile << " " << GetNameOutput(0) << " = " << minOutput[0] << "D0 + VDelta" << endl;
1980 
1981  }
1982  else
1983  { // kCR, kZeroOne
1984  sourcefile << " " << GetNameOutput(0) << " = "<< minOutput[0] << "D0 + " << maxOutput[0] << "D0 * VOut;" << endl;
1985  }
1986 
1987  sourcefile << endl;
1988  sourcefile << "C --- " << endl;
1989  sourcefile << " RETURN" << endl;
1990  sourcefile << " END" << endl;
1991 
1992  sourcefile.close();
1993 }
std::string PMMLlib::PMMLlib::ExportNeuralNetworkPyStr ( std::string  functionName,
std::string  header 
)
private

Specific to NeuralNetwork.

Export the current model as a function in a Python string.

Parameters
functionName   Name of the function
header         Header of the function
Returns
Function as a string

Definition at line 2021 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::CheckNeuralNetwork(), PMMLlib::PMMLlib::fillVectorsForExport(), PMMLlib::PMMLlib::GetNbInputs(), PMMLlib::PMMLlib::GetNbNeuronsAtLayer(), PMMLlib::PMMLlib::GetNbOutputs(), PMMLlib::PMMLlib::GetNormalizationType(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportNeuralNetworkPython(), and PMMLlib::PMMLlib::ExportPyStr().

2023 {
2024  CheckNeuralNetwork();
2025 
2026  ostringstream out;
2027 
2028  // Get the different values required
2029  int nInput = GetNbInputs();
2030  int nOutput = GetNbOutputs();
2031  int nHidden = GetNbNeuronsAtLayer(0);
2032  int nNeurons = nInput+nOutput+nHidden;
2033  int nWeights = nHidden*(nInput+nOutput+1)+nOutput;
2034  int normType = GetNormalizationType();
2035  // Build min/max input/output vectors
2036  vector<double> minInput(nInput);
2037  vector<double> maxInput(nInput);
2038  vector<double> minOutput(nOutput);
2039  vector<double> maxOutput(nOutput);
2040  vector<double> valW(nWeights);
2041  fillVectorsForExport(nInput,nOutput,nHidden,normType,minInput,maxInput,minOutput,maxOutput,valW);
2042 
2043  // Shebang and imports
2044  out << "#!/usr/bin/env python" << endl;
2045  out << "# -*- coding: utf-8 -*-" << endl;
2046  out << endl;
2047  out << "from math import tanh, exp" << endl;
2048  out << endl;
2049 
2050  // ActivationFunction
2051  if( normType==0 )
2052  { // kMinusOneOne
2053  out << "def ActivationFunction(sum): " << endl;
2054  out << " return tanh(sum); " << endl;
2055  }
2056  else
2057  { // kCR, kZeroOne
2058  out << "def ActivationFunction(sum): " << endl;
2059  out << " return ( 1.0 / ( 1.0 + exp( -1.0 * sum ) ) ); " << endl;
2060  }
2061 
2062  out << endl;
2063  out << "def " << functionName <<"(param):" << endl;
2064  out << endl;
2065 
2066  // header
2067  out << " ############################## " << endl;
2068  out << " #" << endl;
2069  // insert comments in header
2070  header = " # " + header;
2071  size_t pos = 0;
2072  while ((pos = header.find("\n", pos)) != std::string::npos)
2073  {
2074  header.replace(pos, 1, "\n #");
2075  pos += 5;
2076  }
2077  out << header << endl;
2078  out << " #" << endl;
2079  out << " ############################## " << endl;
2080  out << endl;
2081 
2082  // Initialisations
2083  out << " nInput = " << nInput << ";" << endl;
2084  out << " nOutput = " << nOutput << ";" << endl;
2085  out << " nHidden = " << nHidden << ";" << endl;
2086  out << " nNeurones = " << nNeurons << ";" << endl;
2087  out << " " << functionName << "_act = [];" << endl;
2088  out << " res = [];" << endl;
2089  out << endl;
2090 
2091  out << " # --- Preprocessing of the inputs and outputs" << endl;
2092  out << " " << functionName << "_minInput = [" << endl << " ";
2093  out << " " ;
2094  for(int i=0 ; i<nInput ; i++)
2095  {
2096  out << minInput[i] << ", ";
2097  if( (i+1)%5==0 )
2098  {
2099  out << endl ;
2100  out << " " ;
2101  }
2102  }
2103  out << endl << " ];" << endl;
2104 
2105  out << " " << functionName << "_minOutput = [" << endl << " ";
2106  out << " " << minOutput[0] ;
2107  out << endl << " ];" << endl;
2108 
2109  out << " " << functionName << "_maxInput = [" << endl << " ";
2110  for(int i=0 ; i<nInput ; i++)
2111  {
2112  out << maxInput[i] << ", ";
2113  if( (i+1)%5==0 )
2114  {
2115  out << endl;
2116  out << " " ;
2117  }
2118  }
2119  out << endl << " ];" << endl;
2120 
2121  out << " " << functionName << "_maxOutput = [" << endl << " ";
2122  out << " " << maxOutput[0] ;
2123  out << endl << " ];" << endl;
2124 
2125  // Weights vector
2126  out << " # --- Values of the weights" << endl;
2127  out << " " << functionName << "_valW = [" << endl << " ";
2128  for(int i=0 ; i<nWeights ; i++)
2129  {
2130  out << valW[i] << ", ";
2131  if ( (i+1)%5 == 0 )
2132  {
2133  out << endl;
2134  out << " " ;
2135  }
2136  }
2137  out << endl << " ];"<<endl;
2138 
2139  out << " # --- Constants" << endl;
2140  out << " indNeurone = 0;" << endl;
2141  out << endl;
2142 
2143  // input layer
2144  out << " # --- Input Layers" << endl;
2145  out << " for i in range(nInput) :" << endl;
2146  if( normType==0 )
2147  { // kMinusOneOne
2148  out << " " << functionName << "_act.append( 2.0 * ( param[i] - "
2149  << functionName << "_minInput[i] ) / ( " << functionName << "_maxInput[i] - "
2150  << functionName << "_minInput[i] ) - 1.0 ) ;"
2151  << endl;
2152  }
2153  else
2154  { // kCR, kZeroOne
2155  out << " " << functionName << "_act.append( ( param[i] - "
2156  << functionName << "_minInput[i] ) / " << functionName << "_maxInput[i] ) ;"
2157  << endl;
2158  }
2159  out << " indNeurone += 1 ;" << endl;
2160  out << " pass" << endl;
2161 
2162  // hidden layer
2163  out << endl;
2164  out << " # --- Hidden Layers" << endl;
2165  out << " for member in range(nHidden):" << endl;
2166  out << " CrtW = member * ( nInput + 2) + 2;" << endl;
2167  out << " sum = " << functionName << "_valW[CrtW];" << endl;
2168  out << " CrtW += 1 ;" << endl;
2169  out << " for source in range(nInput) :" << endl;
2170  out << " sum += " << functionName << "_act[source] * " << functionName << "_valW[CrtW];" << endl;
2171  out << " CrtW += 1 ;" << endl;
2172  out << " pass" << endl;
2173  out << " " << functionName << "_act.append( ActivationFunction(sum) ) ;" << endl;
2174  out << " indNeurone += 1 ;" << endl;
2175  out << " pass" << endl;
2176  out << endl;
2177 
2178  // output layer
2179  out << " # --- Output"<<endl;
2180  out << " for member in range(nOutput):" << endl;
2181  out << " sum = " << functionName << "_valW[0];" << endl;
2182  out << " for source in range(nHidden):" << endl;
2183  out << " CrtW = source * ( nInput + 2) + 1;"<<endl;
2184  out << " sum += " << functionName << "_act[nInput+source] * " << functionName << "_valW[CrtW];" << endl;
2185  out << " pass" << endl;
2186  out << " " << functionName << "_act.append( sum );" << endl;
2187  out << " indNeurone += 1 ;" << endl;
2188  if( normType==0 )
2189  { // kMinusOneOne
2190  out << " res[member] = " << functionName
2191  << "_minOutput[member] + 0.5 * ( " << functionName
2192  << "_maxOutput[member] - " << functionName
2193  << "_minOutput[member] ) * ( sum + 1.0);" << endl;
2194  }
2195  else
2196  { // kCR, kZeroOne
2197  out << " res.append( " << functionName
2198  << "_minOutput[member] + " << functionName
2199  << "_maxOutput[member] * sum );" << endl;
2200  }
2201  out << " pass" << endl;
2202  out << endl;
2203 
2204  // return result
2205  out << " return res;" << endl << endl;
2206  out << endl;
2207 
2208  return out.str();
2209 }
void PMMLlib::PMMLlib::ExportNeuralNetworkPython ( std::string  file,
std::string  functionName,
std::string  header 
)
private

Specific to NeuralNetwork.

Export the current model as a NeuralNetwork function in a Python file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 2002 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::ExportNeuralNetworkPyStr().

Referenced by PMMLlib::PMMLlib::ExportPython().

2005 {
2006  string str(ExportNeuralNetworkPyStr(functionName, header));
2007  // Write the file
2008  ofstream exportfile(file.c_str());
2009  exportfile << str;
2010  exportfile.close();
2011 }
std::string PMMLlib::PMMLlib::ExportPyStr ( std::string  functionName,
std::string  header 
)

Export the current model as a function in a Python string.

Parameters
functionName   Name of the function
header         Header of the function
Returns
Function as a string

Definition at line 654 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::_currentModelType, PMMLlib::PMMLlib::ExportLinearRegressionPyStr(), PMMLlib::PMMLlib::ExportNeuralNetworkPyStr(), PMMLlib::kANN, and PMMLlib::kLR.

656 {
657  if ( _currentModelType == kANN )
658  return ExportNeuralNetworkPyStr(functionName, header);
659  else if ( _currentModelType == kLR )
660  return ExportLinearRegressionPyStr(functionName, header);
661  else
662  throw string("ExportPyStr : PMML type not handled.");
663 }
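
ExportPyStr is the only exported method in this group that returns the generated code as a string instead of writing a file; a minimal sketch, assuming pmml is an already-loaded PMMLlib instance as in the ExportCpp example above:

#include <fstream>
#include <string>
#include "PMMLlib.hxx"   // assumed header name

void dumpPythonExport(PMMLlib::PMMLlib &pmml)
{
    // Documented above: build the Python module as a string.
    std::string script = pmml.ExportPyStr("myModel", "Generated by PMMLlib");

    // Write it out by hand; ExportPython (below) does essentially this internally.
    std::ofstream out("model_export.py");
    out << script;
}
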
void PMMLlib::PMMLlib::ExportPython ( std::string  file,
std::string  functionName,
std::string  header 
)

Export the current model as a function in a Python file.

Parameters
file           Name of the file
functionName   Name of the function
header         Header of the function

Definition at line 635 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::_currentModelType, PMMLlib::PMMLlib::ExportLinearRegressionPython(), PMMLlib::PMMLlib::ExportNeuralNetworkPython(), PMMLlib::kANN, and PMMLlib::kLR.

638 {
639  if ( _currentModelType == kANN )
640  ExportNeuralNetworkPython(file,functionName, header);
641  else if ( _currentModelType == kLR )
642  ExportLinearRegressionPython(file,functionName, header);
643  else
644  throw string("ExportPython : PMML type not handled.");
645 }
void PMMLlib::PMMLlib::fillVectorsForExport ( int  nInput,
int  nOutput,
int  nHidden,
int  normType,
std::vector< double > &  minInput,
std::vector< double > &  maxInput,
std::vector< double > &  minOutput,
std::vector< double > &  maxOutput,
std::vector< double > &  valW 
)
private

Specific to NeuralNetwork.

Fill the vectors used by the ExportXXX methods.

Parameters
nInput     Number of network inputs
nOutput    Number of network outputs
nHidden    Number of neurons in the hidden layer
normType   Normalization type (0: kMinusOneOne, otherwise kCR/kZeroOne)
minInput   Filled with the input normalization minima
maxInput   Filled with the input normalization maxima
minOutput  Filled with the output normalization minima
maxOutput  Filled with the output normalization maxima
valW       Filled with the network weights

Definition at line 1603 of file PMMLlib.cxx.

References PMMLlib::PMMLlib::_currentModelNode, PMMLlib::PMMLlib::_getProp(), PMMLlib::PMMLlib::CheckNeuralNetwork(), PMMLlib::PMMLlib::GetChildByName(), PMMLlib::PMMLlib::GetNeuronBias(), PMMLlib::PMMLlib::GetPrecNeuronSynapse(), and CORBAEngineTest::i.

Referenced by PMMLlib::PMMLlib::ExportNeuralNetworkCpp(), PMMLlib::PMMLlib::ExportNeuralNetworkFortran(), and PMMLlib::PMMLlib::ExportNeuralNetworkPyStr().

1612 {
1613  CheckNeuralNetwork();
1614 
1615  xmlNodePtr netNode = _currentModelNode ;
1616  // Get the different values required
1617  // Build min/max input/output vectors
1618  for(int i=0 ; i<nInput ; i++)
1619  {
1620  xmlNodePtr node_inputs = GetChildByName(netNode,"NeuralInputs");
1621  node_inputs = node_inputs->children;
1622  for(int j = 0;j<i;j++)
1623  {
1624  node_inputs = node_inputs->next;
1625  }
1626  node_inputs = node_inputs->children; // DerivedField
1627  node_inputs = node_inputs->children; // NormContinuous
1628  node_inputs = node_inputs->children; // LinearNorm
1629  string strOrig1 = _getProp(node_inputs, string("orig") );
1630  double orig1 = atof( strOrig1.c_str() );
1631  string strNorm1 = _getProp(node_inputs, string("norm") );
1632  double norm1 = atof( strNorm1.c_str() );
1633  node_inputs = node_inputs->next;
1634  string strOrig2 = _getProp(node_inputs, string("orig") );
1635  double orig2 = atof( strOrig2.c_str() );
1636  string strNorm2 = _getProp(node_inputs, string("norm") );
1637  if( normType==0 )
1638  { // kMinusOneOne
1639  minInput[i] = orig1;
1640  maxInput[i] = orig2;
1641  }
1642  else
1643  { // kCR, kZeroOne
1644  minInput[i] = orig2;
1645  maxInput[i] = -1.0*norm1*orig2;
1646  }
1647  }
1648  xmlNodePtr node_outputs = GetChildByName(netNode,"NeuralOutputs");
1649  node_outputs = node_outputs->children;
1650  node_outputs = node_outputs->children; // DerivedField
1651  node_outputs = node_outputs->children; // NormContinuous
1652  node_outputs = node_outputs->children; // LinearNorm
1653  string strOrig1 = _getProp(node_outputs, string("orig") );
1654  double orig1 = atof( strOrig1.c_str() );
1655  string strNorm1 = _getProp(node_outputs, string("norm") );
1656  double norm1 = atof( strNorm1.c_str() );
1657  node_outputs = node_outputs->next;
1658  string strOrig2 = _getProp(node_outputs, string("orig") );
1659  double orig2 = atof( strOrig2.c_str() );
1660  if( normType==0 )
1661  { // kMinusOneOne
1662  minOutput[0] = orig1;
1663  maxOutput[0] = orig2;
1664  }
1665  else
1666  { // kCR, kZeroOne
1667  minOutput[0] = orig2;
1668  maxOutput[0] = -1.0*norm1*orig2;
1669  }
1670  // Build weight vector
1671  for(int j=0 ; j<nHidden ; j++) // hidden layers
1672  {
1673  valW[j*(nInput+nOutput+1)+2] = GetNeuronBias( 0, j);
1674  for(int i=0 ; i<nInput ; i++)
1675  {
1676  valW[j*(nInput+nOutput+1)+3+i] = GetPrecNeuronSynapse( 0, j, i);
1677  }
1678  }
1679  for(int j=0 ; j<nOutput ; j++) // output layers
1680  {
1681  valW[0] = GetNeuronBias( 1, j);
1682  for(int i=0 ; i<nHidden ; i++)
1683  {
1684  valW[i*(nInput+nOutput+1)+1] = GetPrecNeuronSynapse( 1, j, i);
1685  }
1686  }
1687 }
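
The indices used in the two weight loops above imply the following valW layout for a single hidden layer; the helper functions below are only a sketch summarizing that layout, not part of the library:

// Sketch of the valW indexing scheme filled by fillVectorsForExport and walked
// through again by the CrtW arithmetic in the generated C++, Fortran and Python code.
inline int idxOutputBias()                                                  { return 0; }
inline int idxHiddenToOutput(int hidden, int nInput, int nOutput)           { return hidden*(nInput+nOutput+1) + 1; }
inline int idxHiddenBias(int hidden, int nInput, int nOutput)               { return hidden*(nInput+nOutput+1) + 2; }
inline int idxInputToHidden(int hidden, int input, int nInput, int nOutput) { return hidden*(nInput+nOutput+1) + 3 + input; }

In the generated code these offsets appear as member*(nInput+2)+2 and similar expressions, which are the same values in the single-output case (nOutput == 1).
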