IntegralOperatorEigenContainer.java

package org.drip.learning.kernel;

/*
 * -*- mode: java; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 */

/*!
 * Copyright (C) 2020 Lakshmi Krishnamurthy
 * Copyright (C) 2019 Lakshmi Krishnamurthy
 * Copyright (C) 2018 Lakshmi Krishnamurthy
 * Copyright (C) 2017 Lakshmi Krishnamurthy
 * Copyright (C) 2016 Lakshmi Krishnamurthy
 * Copyright (C) 2015 Lakshmi Krishnamurthy
 *
 *  This file is part of DROP, an open-source library targeting analytics/risk, transaction cost analytics,
 *      asset liability management analytics, capital, exposure, and margin analytics, valuation adjustment
 *      analytics, and portfolio construction analytics within and across fixed income, credit, commodity,
 *      equity, FX, and structured products. It also includes auxiliary libraries for algorithm support,
 *      numerical analysis, numerical optimization, spline builder, model validation, statistical learning,
 *      and computational support.
 *
 *      https://lakshmidrip.github.io/DROP/
 *
 *  DROP is composed of three modules:
 *
 *  - DROP Product Core - https://lakshmidrip.github.io/DROP-Product-Core/
 *  - DROP Portfolio Core - https://lakshmidrip.github.io/DROP-Portfolio-Core/
 *  - DROP Computational Core - https://lakshmidrip.github.io/DROP-Computational-Core/
 *
 *  DROP Product Core implements libraries for the following:
 *  - Fixed Income Analytics
 *  - Loan Analytics
 *  - Transaction Cost Analytics
 *
 *  DROP Portfolio Core implements libraries for the following:
 *  - Asset Allocation Analytics
 *  - Asset Liability Management Analytics
 *  - Capital Estimation Analytics
 *  - Exposure Analytics
 *  - Margin Analytics
 *  - XVA Analytics
 *
 *  DROP Computational Core implements libraries for the following:
 *  - Algorithm Support
 *  - Computation Support
 *  - Function Analysis
 *  - Model Validation
 *  - Numerical Analysis
 *  - Numerical Optimizer
 *  - Spline Builder
 *  - Statistical Learning
 *
 *  Documentation for DROP is Spread Over:
 *
 *  - Main                     => https://lakshmidrip.github.io/DROP/
 *  - Wiki                     => https://github.com/lakshmiDRIP/DROP/wiki
 *  - GitHub                   => https://github.com/lakshmiDRIP/DROP
 *  - Repo Layout Taxonomy     => https://github.com/lakshmiDRIP/DROP/blob/master/Taxonomy.md
 *  - Javadoc                  => https://lakshmidrip.github.io/DROP/Javadoc/index.html
 *  - Technical Specifications => https://github.com/lakshmiDRIP/DROP/tree/master/Docs/Internal
 *  - Release Versions         => https://lakshmidrip.github.io/DROP/version.html
 *  - Community Credits        => https://lakshmidrip.github.io/DROP/credits.html
 *  - Issues Catalog           => https://github.com/lakshmiDRIP/DROP/issues
 *  - JUnit                    => https://lakshmidrip.github.io/DROP/junit/index.html
 *  - Jacoco                   => https://lakshmidrip.github.io/DROP/jacoco/index.html
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *      you may not use this file except in compliance with the License.
 *
 *  You may obtain a copy of the License at
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *      distributed under the License is distributed on an "AS IS" BASIS,
 *      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *
 *  See the License for the specific language governing permissions and
 *      limitations under the License.
 */

/**
 * <i>IntegralOperatorEigenContainer</i> holds the Group of Eigen-Components that result from the
 * Eigenization of the R<sup>x</sup> L<sub>2</sub> To R<sup>x</sup> L<sub>2</sub> Kernel Linear Integral
 * Operator defined by:
 *
 *      T_k [f(.)] := Integral Over Input Space {k (., y) * f(y) * d[Prob(y)]}
 *
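 *  For instance, when Prob(y) is the Empirical Measure of the Samples y_1, ..., y_n, the Operator reduces
 *      to the Kernel Average
 *
 *      T_k [f(.)] (x) = (1 / n) * Sum over j of {k (x, y_j) * f(y_j)}
 *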
 * <br><br>
 *  The References are:
 * <br><br>
 * <ul>
 *  <li>
 *      Ash, R. (1965): <i>Information Theory</i> <b>Interscience</b> New York
 *  </li>
 *  <li>
 *      Carl, B., and I. Stephani (1990): <i>Entropy, Compactness, and Approximation of Operators</i>
 *          <b>Cambridge University Press</b> Cambridge UK
 *  </li>
 *  <li>
 *      Gordon, Y., H. Konig, and C. Schutt (1987): Geometric and Probabilistic Estimates of Entropy and
 *          Approximation Numbers of Operators <i>Journal of Approximation Theory</i> <b>49</b> 219-237
 *  </li>
 *  <li>
 *      Konig, H. (1986): <i>Eigenvalue Distribution of Compact Operators</i> <b>Birkhauser</b> Basel,
 *          Switzerland
 *  </li>
 *  <li>
 *      Smola, A. J., A. Elisseeff, B. Scholkopf, and R. C. Williamson (2000): Entropy Numbers for Convex
 *          Combinations and MLPs, in: <i>Advances in Large Margin Classifiers, A. Smola, P. Bartlett, B.
 *          Scholkopf, and D. Schuurmans - editors</i> <b>MIT Press</b> Cambridge, MA
 *  </li>
 * </ul>
 *
 *  <br><br>
 *  <ul>
 *      <li><b>Module </b> = <a href = "https://github.com/lakshmiDRIP/DROP/tree/master/ComputationalCore.md">Computational Core Module</a></li>
 *      <li><b>Library</b> = <a href = "https://github.com/lakshmiDRIP/DROP/tree/master/StatisticalLearningLibrary.md">Statistical Learning</a></li>
 *      <li><b>Project</b> = <a href = "https://github.com/lakshmiDRIP/DROP/tree/master/src/main/java/org/drip/learning">Agnostic Learning Bounds under Empirical Loss Minimization Schemes</a></li>
 *      <li><b>Package</b> = <a href = "https://github.com/lakshmiDRIP/DROP/tree/master/src/main/java/org/drip/learning/kernel">Statistical Learning Banach Mercer Kernels</a></li>
 *  </ul>
 *
 * @author Lakshmi Krishnamurthy
 */

public class IntegralOperatorEigenContainer {
    private org.drip.learning.kernel.IntegralOperatorEigenComponent[] _aIOEC = null;

    /**
     * IntegralOperatorEigenContainer Constructor
     *
     * @param aIOEC Array of the Integral Operator Eigen-Components
     *
     * @throws java.lang.Exception Thrown if the Inputs are Invalid
     */

    public IntegralOperatorEigenContainer (
        final org.drip.learning.kernel.IntegralOperatorEigenComponent[] aIOEC)
        throws java.lang.Exception
    {
        if (null == (_aIOEC = aIOEC) || 0 == _aIOEC.length)
            throw new java.lang.Exception ("IntegralOperatorEigenContainer ctr: Invalid Inputs");
    }
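
    /*
     * Usage sketch (illustrative only): "eigenComponentArray" below stands for a hypothetical, previously
     *  assembled array of IntegralOperatorEigenComponent instances, e.g., the output of a Mercer Kernel
     *  eigen-decomposition. The Container validates and wraps the array:
     *
     *      IntegralOperatorEigenContainer eigenContainer = new IntegralOperatorEigenContainer (
     *          eigenComponentArray
     *      );
     *
     *      org.drip.spaces.metric.RdNormed eigenInputSpace = eigenContainer.inputMetricVectorSpace();
     */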

    /**
     * Retrieve the Array of the Integral Operator Eigen-Components
     *
     * @return The Array of the Integral Operator Eigen-Components
     */

    public org.drip.learning.kernel.IntegralOperatorEigenComponent[] eigenComponents()
    {
        return _aIOEC;
    }

    /**
     * Retrieve the Eigen Input Space
     *
     * @return The Eigen Input Space
     */

    public org.drip.spaces.metric.RdNormed inputMetricVectorSpace()
    {
        return _aIOEC[0].eigenFunction().inputMetricVectorSpace();
    }

    /**
     * Retrieve the Eigen Output Space
     *
     * @return The Eigen Output Space
     */

    public org.drip.spaces.metric.R1Normed outputMetricVectorSpace()
    {
        return _aIOEC[0].eigenFunction().outputMetricVectorSpace();
    }

    /**
     * Generate the Diagonally Scaled Normed Vector Space of the RKHS Feature Space Bounds that results from
     *  applying the Diagonal Scaling Operator
     *
     * @param dso The Diagonal Scaling Operator
     *
     * @return The Diagonally Scaled Normed Vector Space of the RKHS Feature Space
     */

    public org.drip.spaces.metric.R1Combinatorial diagonallyScaledFeatureSpace (
        final org.drip.learning.kernel.DiagonalScalingOperator dso)
    {
        if (null == dso) return null;

        double[] adblDiagonalScalingOperator = dso.scaler();

        int iDimension = adblDiagonalScalingOperator.length;

        if (iDimension != _aIOEC.length) return null;

        java.util.List<java.lang.Double> lsElementSpace = new java.util.ArrayList<java.lang.Double>();

        for (int i = 0; i < iDimension; ++i)
            lsElementSpace.add (0.5 * _aIOEC[i].rkhsFeatureParallelepipedLength() /
                adblDiagonalScalingOperator[i]);

        try {
            return new org.drip.spaces.metric.R1Combinatorial (lsElementSpace, null, 2);
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }
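
    /*
     * Illustrative note: per the loop above, each Element of the Scaled Space is one half of the
     *  Eigen-Component's RKHS Feature Parallelepiped Length divided by the matching Diagonal Scaling Entry,
     *  and the Elements are assembled into an R1Combinatorial Space (constructed here with norm parameter
     *  2). A hypothetical invocation, with "eigenContainer" and "diagonalScalingOperator" constructed
     *  elsewhere, might read:
     *
     *      org.drip.spaces.metric.R1Combinatorial scaledFeatureSpace =
     *          eigenContainer.diagonallyScaledFeatureSpace (diagonalScalingOperator);
     *
     *  A null return indicates invalid inputs or a dimension mismatch with the Eigen-Component Array.
     */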

    /**
     * Generate the Operator Class Covering Number Bounds of the RKHS Feature Space that result from the
     *  Application of the Diagonal Scaling Operator
     *
     * @param dso The Diagonal Scaling Operator
     *
     * @return The Operator Class Covering Number Bounds of the RKHS Feature Space
     */

    public org.drip.spaces.cover.OperatorClassCoveringBounds scaledCoveringNumberBounds (
        final org.drip.learning.kernel.DiagonalScalingOperator dso)
    {
        final org.drip.spaces.metric.R1Combinatorial r1CombinatorialScaled = diagonallyScaledFeatureSpace (dso);

        if (null == r1CombinatorialScaled) return null;

        try {
            final double dblPopulationMetricNorm = r1CombinatorialScaled.populationMetricNorm();

            org.drip.spaces.cover.OperatorClassCoveringBounds occb = new
                org.drip.spaces.cover.OperatorClassCoveringBounds() {
                @Override public double entropyNumberLowerBound()
                    throws java.lang.Exception
                {
                    return dso.entropyNumberLowerBound() * dblPopulationMetricNorm;
                }

                @Override public double entropyNumberUpperBound()
                    throws java.lang.Exception
                {
                    return dso.entropyNumberUpperBound() * dblPopulationMetricNorm;
                }

                @Override public int entropyNumberIndex()
                {
                    return dso.entropyNumberIndex();
                }

                @Override public double norm()
                    throws java.lang.Exception
                {
                    return dso.norm() * dblPopulationMetricNorm;
                }

                @Override public org.drip.learning.bound.DiagonalOperatorCoveringBound
                    entropyNumberAsymptote()
                {
                    return dso.entropyNumberAsymptote();
                }
            };

            return occb;
        } catch (java.lang.Exception e) {
            e.printStackTrace();
        }

        return null;
    }
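
    /*
     * Usage sketch (illustrative only): the Bounds above re-scale the Diagonal Scaling Operator's Entropy
     *  Numbers and Norm by the Population Metric Norm of the Diagonally Scaled Feature Space. With
     *  "eigenContainer" and "diagonalScalingOperator" as hypothetical, previously constructed instances:
     *
     *      org.drip.spaces.cover.OperatorClassCoveringBounds coveringBounds =
     *          eigenContainer.scaledCoveringNumberBounds (diagonalScalingOperator);
     *
     *      if (null != coveringBounds) {
     *          try {
     *              double entropyNumberUpperBound = coveringBounds.entropyNumberUpperBound();
     *          } catch (java.lang.Exception e) {
     *              e.printStackTrace();
     *          }
     *      }
     */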
}