Instruct pre-commit to ignore Rust files and release workflow
cschwan committed Oct 24, 2024
1 parent a7c717a commit 9ac905d
Showing 3 changed files with 68 additions and 25 deletions.
.pre-commit-config.yaml (4 changes: 3 additions & 1 deletion)
@@ -5,7 +5,9 @@ repos:
rev: v4.6.0
hooks:
- id: trailing-whitespace
-exclude: ^pineappl_cli/tests/
+# `.rs` files are taken care of by `cargo fmt`
+# `.github/workflows/release.yml` contains vital spaces
+exclude: ^(.*\.rs|\.github/workflows/release.yml)$
- id: end-of-file-fixer
- id: check-merge-conflict
- repo: https://github.com/astral-sh/ruff-pre-commit
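For reference, a minimal standalone sketch (not part of this commit) of which paths the new `exclude` pattern skips. It assumes pre-commit's usual behaviour of treating hook patterns as Python regular expressions matched against each staged file's path; the example paths below are only illustrative.

import re

# Pattern added above in .pre-commit-config.yaml; pre-commit is assumed to
# apply it with re.search to the repository-relative path of each file.
exclude = re.compile(r"^(.*\.rs|\.github/workflows/release.yml)$")

# Illustrative paths (not taken from the commit itself).
paths = [
    "pineappl/src/grid.rs",  # skipped: Rust sources are formatted by `cargo fmt`
    ".github/workflows/release.yml",  # skipped: its trailing spaces are intentional
    "pineappl_py/docs/source/introduction.ipynb",  # still checked for trailing whitespace
]

for path in paths:
    print(path, "->", "excluded" if exclude.search(path) else "checked")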
pineappl_py/docs/source/advanced.ipynb (14 changes: 9 additions & 5 deletions)
@@ -122,6 +122,7 @@
"\n",
"np.random.seed(1234567890)\n",
"\n",
+"\n",
"def hadronic_ps_gen(\n",
" mmin: float, mmax: float\n",
") -> Tuple[float, float, float, float, float, float]:\n",
@@ -156,7 +157,7 @@
" r1 = np.random.uniform()\n",
" r2 = np.random.uniform()\n",
" r3 = np.random.uniform()\n",
-" \n",
+"\n",
" # generate partonic x1 and x2\n",
" tau0 = smin / smax\n",
" tau = pow(tau0, r1)\n",
@@ -256,6 +257,7 @@
"source": [
"import pineappl\n",
"\n",
+"\n",
"def fill_grid(grid: pineappl.grid.Grid, calls: int):\n",
" \"\"\"Fill grid with points.\"\"\"\n",
"\n",
@@ -293,7 +295,7 @@
" weight = jacobian * photon_photon_matrix_element(s, u, t)\n",
" # set factorization and renormalization scale to (roughly) the Z-boson mass\n",
" q2 = 90.0 * 90.0\n",
-" \n",
+"\n",
" # fill the interpolation grid\n",
" grid.fill(x1, x2, q2, 0, np.abs(yll), 0, weight)"
]
@@ -367,7 +369,7 @@
"grid = generate_grid(1000000)\n",
"\n",
"# perform convolution with PDFs: this performs the x1 and x2 integrals\n",
-"# of the partonic cross sections with the PDFs as given by our master \n",
+"# of the partonic cross sections with the PDFs as given by our master\n",
"# formula\n",
"pdf = lhapdf.mkPDF(\"NNPDF31_nnlo_as_0118_luxqed\", 0)\n",
"bins = grid.convolve_with_one(2212, pdf.xfxQ2, pdf.alphasQ2)"
@@ -411,8 +413,10 @@
"nbins = np.append(bins, bins[-1])\n",
"edges = np.arange(0.0, 2.4, 0.1)\n",
"\n",
-"ax.step(edges, nbins, where='post', color=\"C1\")\n",
-"plt.fill_between(np.arange(0.0, 2.4, 0.1), nbins, step=\"post\", color=\"C1\", alpha=0.2)\n",
+"ax.step(edges, nbins, where=\"post\", color=\"C1\")\n",
+"plt.fill_between(\n",
+" np.arange(0.0, 2.4, 0.1), nbins, step=\"post\", color=\"C1\", alpha=0.2\n",
+")\n",
"ax.set_xlabel(\"$|y_{\\ell\\ell}|$\")\n",
"ax.set_ylabel(\"$\\mathrm{d} \\sigma / \\mathrm{d} |y_{\\ell\\ell}|$ [pb]\")\n",
"ax.grid(True, alpha=0.5)\n",
pineappl_py/docs/source/introduction.ipynb (75 changes: 56 additions & 19 deletions)
@@ -125,6 +125,7 @@
"# We first need to load the PDF set with LHAPDF\n",
"import lhapdf\n",
"import numpy as np\n",
+"\n",
"# `Polars` is a better alternative to Pandas (written in Rust!)\n",
"import polars as pl\n",
"\n",
@@ -188,10 +189,12 @@
],
"source": [
"predictions = grid.convolve_with_one(2212, pdf.xfxQ2, pdf.alphasQ2)\n",
-"df_preds = pl.DataFrame({\n",
-" \"bins\": range(predictions.size),\n",
-" \"predictions\": predictions,\n",
-"})\n",
+"df_preds = pl.DataFrame(\n",
+" {\n",
+" \"bins\": range(predictions.size),\n",
+" \"predictions\": predictions,\n",
+" }\n",
+")\n",
"df_preds"
]
},
@@ -231,17 +234,44 @@
"import matplotlib.pyplot as plt\n",
"\n",
"# Experimental central values as provided by HepData\n",
-"data_central = np.array([\n",
-" 1223.0, 3263.0, 4983.0, 6719.0, 8051.0, 8967.0, 9561.0, 9822.0, 9721.0, 9030.0, 7748.0, 6059.0, 4385.0, 2724.0, 1584.0, 749.0, 383.0, 11.0\n",
-"])\n",
+"data_central = np.array(\n",
+" [\n",
+" 1223.0,\n",
+" 3263.0,\n",
+" 4983.0,\n",
+" 6719.0,\n",
+" 8051.0,\n",
+" 8967.0,\n",
+" 9561.0,\n",
+" 9822.0,\n",
+" 9721.0,\n",
+" 9030.0,\n",
+" 7748.0,\n",
+" 6059.0,\n",
+" 4385.0,\n",
+" 2724.0,\n",
+" 1584.0,\n",
+" 749.0,\n",
+" 383.0,\n",
+" 11.0,\n",
+" ]\n",
+")\n",
"\n",
"# Normalization for each bin. See Section below for more details.\n",
-"bin_norm = np.array([0.125 for _ in range(predictions.size - 2)] + [0.250, 0.250])\n",
+"bin_norm = np.array(\n",
+" [0.125 for _ in range(predictions.size - 2)] + [0.250, 0.250]\n",
+")\n",
"\n",
"fig, ax = plt.subplots(figsize=(5.6, 3.9))\n",
"# Factor of `1e3` takes into account the unit conversion into `fb`\n",
-"ax.plot(df_preds[\"bins\"], 1e3 * bin_norm * df_preds[\"predictions\"], 's', markersize=8, label=\"theory\")\n",
-"ax.plot(df_preds[\"bins\"], data_central, 'o', markersize=8, label=\"data\")\n",
+"ax.plot(\n",
+" df_preds[\"bins\"],\n",
+" 1e3 * bin_norm * df_preds[\"predictions\"],\n",
+" \"s\",\n",
+" markersize=8,\n",
+" label=\"theory\",\n",
+")\n",
+"ax.plot(df_preds[\"bins\"], data_central, \"o\", markersize=8, label=\"data\")\n",
"ax.grid(True, alpha=0.5)\n",
"ax.set_yscale(\"log\")\n",
"ax.set_xlabel(\"bins\")\n",
@@ -417,10 +447,7 @@
"for idx, o in enumerate(grid.orders()):\n",
" orders.append(o.as_tuple())\n",
"\n",
-"df_orders = pl.DataFrame(\n",
-" np.array(orders),\n",
-" schema=[\"as\", \"a\", \"lf\", \"lr\"]\n",
-")\n",
+"df_orders = pl.DataFrame(np.array(orders), schema=[\"as\", \"a\", \"lf\", \"lr\"])\n",
"df_orders.with_row_index()"
]
},
@@ -506,10 +533,18 @@
"# an associated bin normalization.\n",
"df = pl.DataFrame({})\n",
"for bin_dim in range(bin_dims):\n",
-" df = pl.concat([df,pl.DataFrame({\n",
-" f\"dim {bin_dim} left\": grid.bin_left(bin_dim),\n",
-" f\"dim {bin_dim} right\": grid.bin_right(bin_dim),\n",
-" })],how=\"vertical\",)\n",
+" df = pl.concat(\n",
+" [\n",
+" df,\n",
+" pl.DataFrame(\n",
+" {\n",
+" f\"dim {bin_dim} left\": grid.bin_left(bin_dim),\n",
+" f\"dim {bin_dim} right\": grid.bin_right(bin_dim),\n",
+" }\n",
+" ),\n",
+" ],\n",
+" how=\"vertical\",\n",
+" )\n",
"df"
]
},
@@ -670,7 +705,9 @@
],
"source": [
"# Load our modified grids\n",
-"grid_nrm = pineappl.grid.Grid.read(\"./LHCB_DY_8TEV_custom_normalizations.pineappl.lz4\")\n",
+"grid_nrm = pineappl.grid.Grid.read(\n",
+" \"./LHCB_DY_8TEV_custom_normalizations.pineappl.lz4\"\n",
+")\n",
"df_nbins = pl.DataFrame({\"bin normalization\": grid_nrm.bin_normalizations()})\n",
"df_nbins.with_row_index()"
]
