Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
74 changes: 74 additions & 0 deletions chainladder/methods/benktander.py
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,48 @@ class Benktander(MethodBase):
The ultimate losses per the method
ibnr_: Triangle
The IBNR per the method

Examples
--------
Benktander is the iterated Bornhuetter-Ferguson model. Like BF, it
requires a per-origin apriori expected ultimate supplied through
``sample_weight``. The ``n_iters`` parameter interpolates between BF
(``n_iters=1``) and chainladder (the limit as ``n_iters`` grows): each
additional iteration shifts the ultimate further toward the chainladder
estimate.

>>> tr = cl.load_sample('ukmotor')
>>> apriori = cl.Chainladder().fit(tr).ultimate_ * 0 + 14000

With ``n_iters=1`` Benktander reproduces Bornhuetter-Ferguson exactly.

>>> cl.Benktander(apriori=1.0, n_iters=1).fit(
... tr, sample_weight=apriori
... ).ultimate_
2261
2007 12690.000000
2008 13121.098503
2009 14028.278620
2010 13272.048822
2011 13911.968891
2012 15614.145287
2013 16029.501746

Increasing ``n_iters`` pulls the immature origins toward the chainladder
estimate. The 2013 origin shows this most: ``16029`` at ``n_iters=1``,
rising to ``19110`` at ``n_iters=4`` and approaching the chainladder
ultimate of ``20680``.

>>> cl.Benktander(apriori=1.0, n_iters=4).fit(
... tr, sample_weight=apriori
... ).ultimate_
2261
2007 12690.000000
2008 13096.902490
2009 14030.535854
2010 13138.365841
2011 13880.984774
2012 16719.527550
2013 19110.806503
"""

def __init__(self, apriori=1.0, n_iters=1, apriori_sigma=0, random_state=None):
Expand All @@ -58,6 +100,16 @@ def fit(self, X, y=None, sample_weight=None):
-------
self: object
Returns the instance itself.

Examples
--------
Fit returns the estimator itself, with ``ultimate_`` populated. The
repr shows non-default parameters.

>>> tr = cl.load_sample('ukmotor')
>>> apriori = cl.Chainladder().fit(tr).ultimate_ * 0 + 14000
>>> cl.Benktander(apriori=1.0, n_iters=2).fit(tr, sample_weight=apriori)
Benktander(n_iters=2)
"""
if sample_weight is None:
raise ValueError("sample_weight is required.")
Expand All @@ -81,6 +133,28 @@ def predict(self, X, sample_weight=None):
-------
X_new: Triangle
Loss data with Benktander ultimate applied

Examples
--------
Fit on a prior-period view of the data, then apply the model to the
current Triangle and a refreshed apriori.

>>> tr = cl.load_sample('ukmotor')
>>> tr_prior = tr[tr.valuation < tr.valuation_date]
>>> apriori_prior = cl.Chainladder().fit(tr_prior).ultimate_ * 0 + 14000
>>> apriori = cl.Chainladder().fit(tr).ultimate_ * 0 + 14000
>>> model = cl.Benktander(apriori=1.0, n_iters=2).fit(
... tr_prior, sample_weight=apriori_prior
... )
>>> model.predict(tr, sample_weight=apriori).ultimate_
2261
2007 12690.000000
2008 12746.000000
2009 13642.189922
2010 12740.812082
2011 13516.188545
2012 15914.716737
2013 17193.715555
"""
X_new = super().predict(X, sample_weight)
X_new.expectation_ = self._get_benktander_aprioris(X, sample_weight)
Expand Down
70 changes: 70 additions & 0 deletions chainladder/methods/bornferg.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,6 +31,45 @@ class BornhuetterFerguson(Benktander):
The ultimate losses per the method
ibnr_: Triangle
The IBNR per the method

Examples
--------
Bornhuetter-Ferguson requires an apriori expected ultimate per origin,
supplied through ``sample_weight``. ``sample_weight`` must be a
chainladder Triangle aligned with ``X``, not a scalar; passing
``sample_weight=14000`` would raise ``AttributeError`` because the model
accesses ``.shape``, which a plain number does not have.

A common idiom for building a flat per-origin apriori is to take any
same-shape Triangle, zero it out, and add the desired value. Below uses
the chainladder ultimate as the shape donor.

>>> tr = cl.load_sample('ukmotor')
>>> cl_ult = cl.Chainladder().fit(tr).ultimate_
>>> apriori = cl_ult * 0 + float(cl_ult.sum()) / 7
>>> apriori
2261
2007 14903.967562
2008 14903.967562
2009 14903.967562
2010 14903.967562
2011 14903.967562
2012 14903.967562
2013 14903.967562

Fit with that apriori. The BF ultimates pull the immature origins toward
the apriori while leaving mature origins close to chainladder.

>>> model = cl.BornhuetterFerguson(apriori=1.0).fit(tr, sample_weight=apriori)
>>> model.ultimate_
2261
2007 12690.000000
2008 13145.318280
2009 14095.125641
2010 13412.748068
2011 14150.549749
2012 15999.244850
2013 16658.824705
"""

def __init__(self, apriori=1.0, apriori_sigma=0.0, random_state=None):
Expand All @@ -54,6 +93,15 @@ def fit(self, X, y=None, sample_weight=None):
-------
self : object
Returns the instance itself.

Examples
--------
Fit returns the estimator itself, with ``ultimate_`` populated.

>>> tr = cl.load_sample('ukmotor')
>>> apriori = cl.Chainladder().fit(tr).ultimate_ * 0 + 14000
>>> cl.BornhuetterFerguson(apriori=1.0).fit(tr, sample_weight=apriori)
BornhuetterFerguson()
"""
self.n_iters = 1
super().fit(X, y, sample_weight)
Expand All @@ -73,5 +121,27 @@ def predict(self, X, sample_weight=None):
-------
X_new: Triangle
Loss data with Bornhuetter-Ferguson ultimate applied

Examples
--------
Fit on a prior-period view of the data, then apply the model to the
current Triangle and a refreshed apriori.

>>> tr = cl.load_sample('ukmotor')
>>> tr_prior = tr[tr.valuation < tr.valuation_date]
>>> apriori_prior = cl.Chainladder().fit(tr_prior).ultimate_ * 0 + 14000
>>> apriori = cl.Chainladder().fit(tr).ultimate_ * 0 + 14000
>>> model = cl.BornhuetterFerguson(apriori=1.0).fit(
... tr_prior, sample_weight=apriori_prior
... )
>>> model.predict(tr, sample_weight=apriori).ultimate_
2261
2007 12690.000000
2008 12746.000000
2009 13658.425101
2010 12883.599658
2011 13610.582796
2012 15360.020613
2013 15893.717063
"""
return super().predict(X, sample_weight)
83 changes: 83 additions & 0 deletions chainladder/methods/capecod.py
Original file line number Diff line number Diff line change
Expand Up @@ -48,6 +48,57 @@ class CapeCod(Benktander):
The trended apriori vector developed by the Cape Cod Method
detrended_apriori_:
The detrended apriori vector developed by the Cape Cod Method

Examples
--------
Unlike Bornhuetter-Ferguson and Benktander, CapeCod derives the apriori
loss ratio from the data itself. ``sample_weight`` represents exposure
(e.g. earned premium) rather than an apriori expected ultimate.

>>> tr = cl.load_sample('ukmotor')
>>> exposure = cl.Chainladder().fit(tr).ultimate_ * 0 + 20000

With default ``decay=1`` and ``trend=0``, every origin receives the same
apriori loss ratio: the exposure-weighted mean loss ratio across all
origins.

>>> model = cl.CapeCod().fit(tr, sample_weight=exposure)
>>> model.apriori_
2261
2007 0.706225
2008 0.706225
2009 0.706225
2010 0.706225
2011 0.706225
2012 0.706225
2013 0.706225

Setting ``decay`` below 1 down-weights distant origins when computing
each origin's apriori, so each origin receives its own loss-ratio
estimate that drifts toward more recent experience.

>>> cl.CapeCod(decay=0.5).fit(tr, sample_weight=exposure).apriori_
2261
2007 0.653584
2008 0.666113
2009 0.683132
2010 0.689123
2011 0.717497
2012 0.776364
2013 0.836006

Setting ``trend`` projects the loss ratio forward over the experience
period. With ``decay=1``, all origins share the trended apriori.

>>> cl.CapeCod(trend=0.05).fit(tr, sample_weight=exposure).apriori_
2261
2007 0.836096
2008 0.836096
2009 0.836096
2010 0.836096
2011 0.836096
2012 0.836096
2013 0.836096
"""

def __init__(
Expand Down Expand Up @@ -81,6 +132,16 @@ def fit(self, X, y=None, sample_weight=None):
-------
self: object
Returns the instance itself.

Examples
--------
Fit returns the estimator itself, with ``ultimate_`` and
``apriori_`` populated. The repr shows non-default parameters.

>>> tr = cl.load_sample('ukmotor')
>>> exposure = cl.Chainladder().fit(tr).ultimate_ * 0 + 20000
>>> cl.CapeCod(trend=0.05).fit(tr, sample_weight=exposure)
CapeCod(trend=0.05)
"""

if sample_weight is None:
Expand Down Expand Up @@ -138,6 +199,28 @@ def predict(self, X, sample_weight=None):
-------
X_new: Triangle
Loss data with CapeCod ultimate applied

Examples
--------
Fit on a prior-period view of the data, then apply the model to the
current Triangle and a refreshed exposure.

>>> tr = cl.load_sample('ukmotor')
>>> tr_prior = tr[tr.valuation < tr.valuation_date]
>>> exposure_prior = cl.Chainladder().fit(tr_prior).ultimate_ * 0 + 20000
>>> exposure = cl.Chainladder().fit(tr).ultimate_ * 0 + 20000
>>> model = cl.CapeCod(trend=0.05).fit(
... tr_prior, sample_weight=exposure_prior
... )
>>> model.predict(tr, sample_weight=exposure).ultimate_
2261
2007 12690.000000
2008 12746.000000
2009 13631.353487
2010 12896.639975
2011 13806.211939
2012 15991.144199
2013 17489.630279
"""
if sample_weight is None:
raise ValueError("sample_weight is required.")
Expand Down
81 changes: 81 additions & 0 deletions chainladder/methods/chainladder.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,57 @@ class Chainladder(MethodBase):
full_triangle_:
The ultimates back-filled to each development period in **X** retaining
the known data

Examples
--------
Fit the chainladder method to a loss triangle and inspect the projected
ultimates.

>>> tr = cl.load_sample('ukmotor')
>>> model = cl.Chainladder().fit(tr)
>>> model.ultimate_
2261
2007 12690.000000
2008 13096.902024
2009 14030.536767
2010 13137.859861
2011 13880.404483
2012 16812.150646
2013 20679.919151

The ``ibnr_`` attribute is ``ultimate_ - latest_diagonal``. The 2007 origin
is fully developed in the data, so its IBNR is ``NaN``.

>>> model.ibnr_
2261
2007 NaN
2008 350.902024
2009 1037.536767
2010 2044.859861
2011 3663.404483
2012 7162.150646
2013 14396.919151

``full_triangle_`` projects each origin to ultimate while preserving the
known cells. Showing the last three origins and the first five development
periods makes the data-to-projection boundary visible: whole-number cells
are observed, decimal cells are projected.

>>> model.full_triangle_.iloc[..., -3:, :5]
12 24 36 48 60
2011 4150.0 7897.000000 10217.000000 11719.970266 12853.969769
2012 5102.0 9650.000000 12374.981102 14195.400857 15568.917781
2013 6283.0 11870.058983 15221.943585 17461.165333 19150.670711

``full_expectation_`` is similar but replaces every cell, including the
known ones, with the model's expectation. Compare the ``12`` column above
against the same slice below: the observed values have been overwritten.

>>> model.full_expectation_.iloc[..., -3:, :5]
12 24 36 48 60
2011 4217.162588 7967.208127 10217.000000 11719.970266 12853.969769
2012 5107.889530 9650.000000 12374.981102 14195.400857 15568.917781
2013 6283.000000 11870.058983 15221.943585 17461.165333 19150.670711
"""

def fit(self, X, y=None, sample_weight=None):
Expand All @@ -42,6 +93,15 @@ def fit(self, X, y=None, sample_weight=None):
-------
self: object
Returns the instance itself.

Examples
--------
Fitting returns the estimator itself, so it can be chained with
attribute access.

>>> tr = cl.load_sample('ukmotor')
>>> cl.Chainladder().fit(tr)
Chainladder()
"""
super().fit(X, y, sample_weight)
self.ultimate_ = self._get_ultimate(self.X_)
Expand All @@ -62,6 +122,27 @@ def predict(self, X, sample_weight=None):
-------
X_new: Triangle
Loss data with chainladder ultimate applied

Examples
--------
``predict`` applies the fitted development patterns to a different
Triangle. A common workflow is to fit on a prior-period view of the
data (one diagonal removed) and then apply that model to the current
Triangle. The ultimates differ from a freshly-fit model because the
patterns reflect the older view.

>>> tr = cl.load_sample('ukmotor')
>>> tr_prior = tr[tr.valuation < tr.valuation_date]
>>> model = cl.Chainladder().fit(tr_prior)
>>> model.predict(tr).ultimate_
2261
2007 12690.000000
2008 12746.000000
2009 13641.379750
2010 12719.871218
2011 13485.986574
2012 16296.783586
2013 20040.175415
"""
X_new = super().predict(X, sample_weight)
X_new.ultimate_ = self._get_ultimate(X_new, sample_weight)
Expand Down
Loading
Loading