Merge pull request #169 from nf-core/dev
Release candidate 2.1.0
maxulysse authored Jun 1, 2023
2 parents b4d89cf + c3cf64d commit fe4ac65
Showing 60 changed files with 337 additions and 234 deletions.
2 changes: 1 addition & 1 deletion .editorconfig
@@ -8,7 +8,7 @@ trim_trailing_whitespace = true
indent_size = 4
indent_style = space

[*.{md,yml,yaml,html,css,scss,js,cff}]
[*.{md,yml,yaml,html,css,scss,js}]
indent_size = 2

# These files are edited and tested upstream in nf-core/modules
2 changes: 1 addition & 1 deletion .github/ISSUE_TEMPLATE/bug_report.yml
@@ -45,6 +45,6 @@ body:
* Nextflow version _(eg. 22.10.1)_
* Hardware _(eg. HPC, Desktop, Cloud)_
* Executor _(eg. slurm, local, awsbatch)_
* Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter or Charliecloud)_
* Container engine: _(e.g. Docker, Singularity, Conda, Podman, Shifter, Charliecloud, or Apptainer)_
* OS _(eg. CentOS Linux, macOS, Linux Mint)_
* Version of nf-core/hic _(eg. 1.1, 1.5, 1.8.2)_
3 changes: 2 additions & 1 deletion .github/PULL_REQUEST_TEMPLATE.md
@@ -15,7 +15,8 @@ Learn more about contributing: [CONTRIBUTING.md](https://github.com/nf-core/hic/

- [ ] This comment contains a description of changes (with reason).
- [ ] If you've fixed a bug or added code that should be tested, add tests!
- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)- [ ] If necessary, also make a PR on the nf-core/hic _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
- [ ] If you've added a new tool - have you followed the pipeline conventions in the [contribution docs](https://github.com/nf-core/hic/tree/master/.github/CONTRIBUTING.md)
- [ ] If necessary, also make a PR on the nf-core/hic _branch_ on the [nf-core/test-datasets](https://github.com/nf-core/test-datasets) repository.
- [ ] Make sure your code lints (`nf-core lint`).
- [ ] Ensure the test suite passes (`nextflow run . -profile test,docker --outdir <OUTDIR>`).
- [ ] Usage Documentation in `docs/usage.md` is updated.
2 changes: 1 addition & 1 deletion .github/workflows/awsfulltest.yml
@@ -24,7 +24,7 @@ jobs:
{
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/hic/results-${{ github.sha }}"
}
profiles: test_full,aws_tower
profiles: test_full,public_aws_ecr
- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
4 changes: 2 additions & 2 deletions .github/workflows/awstest.yml
@@ -12,7 +12,7 @@ jobs:
steps:
# Launch workflow using Tower CLI tool action
- name: Launch workflow via tower
uses: nf-core/tower-action@v3
uses: seqeralabs/action-tower-launch@v1
with:
workspace_id: ${{ secrets.TOWER_WORKSPACE_ID }}
access_token: ${{ secrets.TOWER_ACCESS_TOKEN }}
@@ -22,7 +22,7 @@
{
"outdir": "s3://${{ secrets.AWS_S3_BUCKET }}/hic/results-test-${{ github.sha }}"
}
profiles: test,aws_tower
profiles: test,public_aws_ecr
- uses: actions/upload-artifact@v3
with:
name: Tower debug log file
2 changes: 1 addition & 1 deletion .github/workflows/branch.yml
@@ -13,7 +13,7 @@ jobs:
- name: Check PRs
if: github.repository == 'nf-core/hic'
run: |
{ [[ ${{github.event.pull_request.head.repo.full_name }} == nf-core/hic ]] && [[ $GITHUB_HEAD_REF = "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]
{ [[ ${{github.event.pull_request.head.repo.full_name }} == nf-core/hic ]] && [[ $GITHUB_HEAD_REF == "dev" ]]; } || [[ $GITHUB_HEAD_REF == "patch" ]]
# If the above check failed, post a comment on the PR explaining the failure
# NOTE - this doesn't currently work if the PR is coming from a fork, due to limitations in GitHub actions secrets
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -31,7 +31,7 @@ jobs:
uses: actions/checkout@v3

- name: Install Nextflow
uses: nf-core/setup-nextflow@v1
uses: nf-core/setup-nextflow@v1.3.0
with:
version: "${{ matrix.NXF_VER }}"

24 changes: 24 additions & 0 deletions .github/workflows/clean-up.yml
@@ -0,0 +1,24 @@
name: "Close user-tagged issues and PRs"
on:
schedule:
- cron: "0 0 * * 0" # Once a week

jobs:
clean-up:
runs-on: ubuntu-latest
permissions:
issues: write
pull-requests: write
steps:
- uses: actions/stale@v7
with:
stale-issue-message: "This issue has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment otherwise this issue will be closed in 20 days."
stale-pr-message: "This PR has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor. Remove stale label or add a comment if it is still useful."
close-issue-message: "This issue was closed because it has been tagged as awaiting-changes or awaiting-feedback by an nf-core contributor and then staled for 20 days with no activity."
days-before-stale: 30
days-before-close: 20
days-before-pr-close: -1
any-of-labels: "awaiting-changes,awaiting-feedback"
exempt-issue-labels: "WIP"
exempt-pr-labels: "WIP"
repo-token: "${{ secrets.GITHUB_TOKEN }}"
2 changes: 1 addition & 1 deletion .github/workflows/linting.yml
@@ -78,7 +78,7 @@ jobs:

- uses: actions/setup-python@v4
with:
python-version: "3.7"
python-version: "3.8"
architecture: "x64"

- name: Install dependencies
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,5 @@
repos:
- repo: https://github.com/pre-commit/mirrors-prettier
rev: "v2.7.1"
hooks:
- id: prettier
8 changes: 8 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,14 @@
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## v2.1.0 - 2023-06-01

### `Added`

- Added public_aws_ecr profile for using containers stored on ECR.

### `Fixed`

## v2.0.0 - 2023-01-12

### `Added`
49 changes: 30 additions & 19 deletions README.md
@@ -8,7 +8,7 @@
[![run with singularity](https://img.shields.io/badge/run%20with-singularity-1d355c.svg?labelColor=000000)](https://sylabs.io/docs/)
[![Launch on Nextflow Tower](https://img.shields.io/badge/Launch%20%F0%9F%9A%80-Nextflow%20Tower-%234256e7)](https://tower.nf/launch?pipeline=https://github.com/nf-core/hic)

[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23hic-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/hic)[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core)[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core)
[![Get help on Slack](http://img.shields.io/badge/slack-nf--core%20%23hic-4A154B?labelColor=000000&logo=slack)](https://nfcore.slack.com/channels/hic)[![Follow on Twitter](http://img.shields.io/badge/twitter-%40nf__core-1DA1F2?labelColor=000000&logo=twitter)](https://twitter.com/nf_core)[![Follow on Mastodon](https://img.shields.io/badge/mastodon-nf__core-6364ff?labelColor=FFFFFF&logo=mastodon)](https://mstdn.science/@nf_core)[![Watch on YouTube](http://img.shields.io/badge/youtube-nf--core-FF0000?labelColor=000000&logo=youtube)](https://www.youtube.com/c/nf-core)

## Introduction

@@ -36,34 +36,45 @@ On release, automated continuous integration tests run the pipeline on a full-si
8. TADs calling ([`HiCExplorer`](https://github.com/deeptools/HiCExplorer), [`cooltools`](https://cooltools.readthedocs.io/en/latest/))
9. Quality control report ([`MultiQC`](https://multiqc.info/))

## Quick Start
## Usage

1. Install [`Nextflow`](https://www.nextflow.io/docs/latest/getstarted.html#installation) (`>=22.10.1`)
> **Note**
> If you are new to Nextflow and nf-core, please refer to [this page](https://nf-co.re/docs/usage/installation) on how
> to set-up Nextflow. Make sure to [test your setup](https://nf-co.re/docs/usage/introduction#how-to-run-a-pipeline)
> with `-profile test` before running the workflow on actual data.
2. Install any of [`Docker`](https://docs.docker.com/engine/installation/), [`Singularity`](https://www.sylabs.io/guides/3.0/user-guide/) (you can follow [this tutorial](https://singularity-tutorial.github.io/01-installation/)), [`Podman`](https://podman.io/), [`Shifter`](https://nersc.gitlab.io/development/shifter/how-to-use/) or [`Charliecloud`](https://hpc.github.io/charliecloud/) for full pipeline reproducibility _(you can use [`Conda`](https://conda.io/miniconda.html) both to install Nextflow itself and also to manage software within pipelines. Please only use it within pipelines as a last resort; see [docs](https://nf-co.re/usage/configuration#basic-configuration-profiles))_.
First, prepare a samplesheet with your input data that looks as follows:

3. Download the pipeline and test it on a minimal dataset with a single command:
`samplesheet.csv`:

```bash
nextflow run nf-core/hic -profile test,YOURPROFILE --outdir <OUTDIR>
```
```csv
sample,fastq_1,fastq_2
HIC_ES_4,SRR5339783_1.fastq.gz,SRR5339783_2.fastq.gz
```

Note that some form of configuration will be needed so that Nextflow knows how to fetch the required software. This is usually done in the form of a config profile (`YOURPROFILE` in the example command above). You can chain multiple config profiles in a comma-separated string.
Each row represents a pair of fastq files (paired end).
Now, you can run the pipeline using:

> - The pipeline comes with config profiles called `docker`, `singularity`, `podman`, `shifter`, `charliecloud` and `conda` which instruct the pipeline to use the named tool for software management. For example, `-profile test,docker`.
> - Please check [nf-core/configs](https://github.com/nf-core/configs#documentation) to see if a custom config file to run nf-core pipelines already exists for your Institute. If so, you can simply use `-profile <institute>` in your command. This will enable either `docker` or `singularity` and set the appropriate execution settings for your local compute environment.
> - If you are using `singularity`, please use the [`nf-core download`](https://nf-co.re/tools/#downloading-pipelines-for-offline-use) command to download images first, before running the pipeline. Setting the [`NXF_SINGULARITY_CACHEDIR` or `singularity.cacheDir`](https://www.nextflow.io/docs/latest/singularity.html?#singularity-docker-hub) Nextflow options enables you to store and re-use the images from a central location for future pipeline runs.
> - If you are using `conda`, it is highly recommended to use the [`NXF_CONDA_CACHEDIR` or `conda.cacheDir`](https://www.nextflow.io/docs/latest/conda.html) settings to store the environments in a central location for future pipeline runs.
```bash
nextflow run nf-core/hic \
-profile <docker/singularity/.../institute> \
--input samplesheet.csv \
--genome GRCh37 \
--outdir <OUTDIR>
```

4. Start running your own analysis!
> **Warning:**
> Please provide pipeline parameters via the CLI or Nextflow `-params-file` option. Custom config files including those
> provided by the `-c` Nextflow option can be used to provide any configuration _**except for parameters**_;
> see [docs](https://nf-co.re/usage/configuration#custom-configuration-files).
```bash
nextflow run nf-core/hic --input samplesheet.csv --outdir <OUTDIR> --genome GRCh37 -profile <docker/singularity/podman/shifter/charliecloud/conda/institute>
```
For more details, please refer to the [usage documentation](https://nf-co.re/hic/usage) and the [parameter documentation](https://nf-co.re/hic/parameters).

## Documentation
## Pipeline output

The nf-core/hic pipeline comes with documentation about the pipeline [usage](https://nf-co.re/hic/usage), [parameters](https://nf-co.re/hic/parameters) and [output](https://nf-co.re/hic/output).
To see the results of a test run with a full size dataset refer to the [results](https://nf-co.re/hic/results) tab on the nf-core website pipeline page.
For more details about the output files and reports, please refer to the
[output documentation](https://nf-co.re/hic/output).

## Credits

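The samplesheet shown in the README hunk above is a plain CSV with `sample`, `fastq_1` and `fastq_2` columns, one read pair per row. As a rough illustration of how such a sheet can be consumed in Nextflow (a minimal sketch only — the channel and variable names are assumptions, not the pipeline's actual input subworkflow):

```groovy
// Minimal Nextflow DSL2 sketch: parse a samplesheet.csv like the one above
// into (sample, [read1, read2]) tuples. Illustrative only, not nf-core/hic code.
Channel
    .fromPath('samplesheet.csv')
    .splitCsv(header: true)
    .map { row -> tuple(row.sample, [ file(row.fastq_1), file(row.fastq_2) ]) }
    .view { sample, reads -> "sample: ${sample}, reads: ${reads}" }
```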
3 changes: 0 additions & 3 deletions bin/check_samplesheet.py
@@ -157,9 +157,6 @@ def sniff_format(handle):
peek = read_head(handle)
handle.seek(0)
sniffer = csv.Sniffer()
if not sniffer.has_header(peek):
logger.critical("The given sample sheet does not appear to contain a header.")
sys.exit(1)
dialect = sniffer.sniff(peek)
return dialect

1 change: 0 additions & 1 deletion bin/mapped_2hic_dnase.py
@@ -362,7 +362,6 @@ def get_read_tag(read, tag):

if cur_handler is not None:
if not r1.is_unmapped and not r2.is_unmapped:

##reorient reads to ease duplicates removal
or1, or2 = get_ordered_reads(r1, r2)
or1_chrom = samfile.get_reference_name(or1.reference_id)
1 change: 0 additions & 1 deletion bin/mapped_2hic_fragments.py
@@ -619,7 +619,6 @@ def get_read_tag(read, tag):
r2_chrom = None

if r1_resfrag is not None or r2_resfrag is not None:

interactionType = get_interaction_type(r1, r1_chrom, r1_resfrag, r2, r2_chrom, r2_resfrag, verbose)
dist = get_PE_fragment_size(r1, r2, r1_resfrag, r2_resfrag, interactionType)
cdist = get_cis_dist(r1, r2)
1 change: 0 additions & 1 deletion bin/mergeSAM.py
@@ -71,7 +71,6 @@ def get_read_name(read):


def sam_flag(read1, read2, hr1, hr2):

f1 = read1.flag
f2 = read2.flag

2 changes: 1 addition & 1 deletion conf/base.config
@@ -14,7 +14,7 @@ process {
memory = { check_max( 8.GB * task.attempt, 'memory' ) }
time = { check_max( 12.h * task.attempt, 'time' ) }

errorStrategy = { task.exitStatus in [143,137,104,134,139] ? 'retry' : 'finish' }
errorStrategy = { task.exitStatus in ((130..145) + 104) ? 'retry' : 'finish' }
maxRetries = 1
maxErrors = '-1'

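The `conf/base.config` change above replaces the explicit list of retryable exit codes with a range expression: `(130..145) + 104` is ordinary Groovy and expands to the codes 130 through 145 plus 104. A small stand-alone sketch of the same condition (illustrative only, outside any Nextflow process context):

```groovy
// Stand-alone Groovy illustration of the new retry condition (not pipeline code).
def retryCodes = (130..145) + 104                  // [130, 131, ..., 145, 104]
def errorStrategy = { exitStatus -> exitStatus in retryCodes ? 'retry' : 'finish' }

assert errorStrategy(137) == 'retry'               // e.g. a SIGKILL / out-of-memory kill
assert errorStrategy(104) == 'retry'
assert errorStrategy(1)   == 'finish'              // ordinary failures still stop the run
```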
8 changes: 8 additions & 0 deletions conf/igenomes.config
@@ -19,6 +19,14 @@ params {
fasta = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/WholeGenomeFasta/genome.fa"
bowtie2 = "${params.igenomes_base}/Homo_sapiens/NCBI/GRCh38/Sequence/Bowtie2Index/"
}
'CHM13' {
fasta = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/WholeGenomeFasta/genome.fa"
bwa = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/BWAIndex/"
bwamem2 = "${params.igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/BWAmem2Index/"
gtf = "${params.igenomes_base}/Homo_sapiens/NCBI/CHM13/Annotation/Genes/genes.gtf"
gff = "ftp://ftp.ncbi.nlm.nih.gov/genomes/all/GCF/009/914/755/GCF_009914755.1_T2T-CHM13v2.0/GCF_009914755.1_T2T-CHM13v2.0_genomic.gff.gz"
mito_name = "chrM"
}
'GRCm38' {
fasta = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/WholeGenomeFasta/genome.fa"
bowtie2 = "${params.igenomes_base}/Mus_musculus/Ensembl/GRCm38/Sequence/Bowtie2Index/"
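The new `CHM13` block follows the usual iGenomes pattern: a genome key that maps to reference paths rooted at `params.igenomes_base`. A simplified sketch of how such a key resolves (the base path and the lookup code below are assumptions for illustration, not taken from this diff):

```groovy
// Hypothetical, simplified genome-key lookup; igenomes_base is a placeholder value.
def igenomes_base = 's3://ngi-igenomes/igenomes'
def genomes = [
    'CHM13': [
        fasta: "${igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/WholeGenomeFasta/genome.fa",
        bwa  : "${igenomes_base}/Homo_sapiens/UCSC/CHM13/Sequence/BWAIndex/"
    ]
]
def genome = 'CHM13'                     // what --genome CHM13 would select
println genomes[genome].fasta            // resolved reference FASTA path
```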
57 changes: 57 additions & 0 deletions conf/public_aws_ecr.config
@@ -0,0 +1,57 @@
/*
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
AWS ECR Config
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Config to set public AWS ECR images wherever possible
This improves speed when running on AWS infrastructure.
Use this as an example template when using your own private registry.
----------------------------------------------------------------------------------------
*/

docker.registry = 'public.ecr.aws'
podman.registry = 'public.ecr.aws'

process {
withName: 'BOWTIE2_ALIGN' {
container = 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:a0ffedb52808e102887f6ce600d092675bf3528a-0'
}
withName: 'BOWTIE2_ALIGN_TRIMMED' {
container = 'quay.io/biocontainers/mulled-v2-ac74a7f02cebcfcc07d8e8d1d750af9c83b4d45a:a0ffedb52808e102887f6ce600d092675bf3528a-0'
}
withName: 'BUILD_CONTACT_MAPS' {
container = 'quay.io/nf-core/ubuntu:20.04'
}
withName: 'COMBINE_MATES' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'COOLTOOLS_EIGSCIS' {
container = 'quay.io/biocontainers/mulled-v2-c81d8d6b6acf4714ffaae1a274527a41958443f6:cc7ea58b8cefc76bed985dcfe261cb276ed9e0cf-0'
}
withName: 'GET_RESTRICTION_FRAGMENTS' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'GET_VALID_INTERACTION' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'GET_VALID_INTERACTION_DNASE' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'ICE_NORMALIZATION' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'MERGE_STATS' {
container = 'quay.io/biocontainers/mulled-v2-c6ff206325681cbb9c9ef890bb8de554172c0483:713df51cd897ceb893b9a6e6420f527d83c2ed95-0'
}
withName: 'MERGE_VALID_INTERACTION' {
container = 'quay.io/nf-core/ubuntu:20.04'
}
withName: 'SAMPLESHEET_CHECK' {
container = 'quay.io/biocontainers/python:3.8.3'
}
withName: 'SPLIT_COOLER_DUMP' {
container = 'quay.io/nf-core/ubuntu:20.04'
}
withName: 'TRIM_READS' {
container = 'quay.io/nf-core/ubuntu:20.04'
}
}
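As the header comment says, this profile doubles as a template for pointing the pipeline at your own registry. A hedged sketch of such an adaptation (the registry hostname and image paths below are placeholders, not values used by nf-core/hic):

```groovy
// Hypothetical private-registry variant of the profile above; all values are placeholders.
docker.registry = 'my-registry.example.com'
podman.registry = 'my-registry.example.com'

process {
    withName: 'SAMPLESHEET_CHECK' {
        container = 'my-registry.example.com/mirrors/python:3.8.3'
    }
    withName: 'MERGE_VALID_INTERACTION' {
        container = 'my-registry.example.com/mirrors/ubuntu:20.04'
    }
}
```

At launch time the shipped profile is simply added to the profile list, e.g. `-profile test,public_aws_ecr`, as in the updated AWS test workflows earlier in this diff.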