diff --git a/.all-contributorsrc b/.all-contributorsrc new file mode 100644 index 00000000..6f41a7c5 --- /dev/null +++ b/.all-contributorsrc @@ -0,0 +1,5 @@ +{ + "projectName": "TulipaEnergyModel", + "projectOwner": "TulipaEnergy", + "files": ["README.md", "docs/src/index.md"] +} diff --git a/.copier-answers.yml b/.copier-answers.yml new file mode 100644 index 00000000..28e665d3 --- /dev/null +++ b/.copier-answers.yml @@ -0,0 +1,16 @@ +# Changes here will be overwritten by Copier +AnswerStrategy: recommended +Authors: Diego A. Tejada-Arango ,Germán Morales-España + ,Lauren Clisby ,Ni Wang ,Abel + Soares Siqueira ,Suvayu Ali ,Laurent + Soucasse ,Greg Neustroev +JuliaIndentation: 4 +JuliaMinVersion: '1.10' +License: Apache-2.0 +LicenseCopyrightHolders: Diego A. Tejada-Arango, Germán Morales-España, Lauren Clisby, + Ni Wang, Abel Soares Siqueira, Suvayu Ali, Laurent Soucasse, Greg Neustroev +PackageName: TulipaEnergyModel +PackageOwner: TulipaEnergy +PackageUUID: 5d7bd171-d18e-45a5-9111-f1f11ac5d04d +_commit: v0.12.0 +_src_path: https://github.com/JuliaBesties/BestieTemplate.jl diff --git a/.editorconfig b/.editorconfig index d0142364..f815163a 100644 --- a/.editorconfig +++ b/.editorconfig @@ -5,6 +5,14 @@ root = true end_of_line = lf insert_final_newline = true charset = utf-8 -indent_size = 4 indent_style = space trim_trailing_whitespace = true + +[*.jl] +indent_size = 4 + +[*.md] +indent_size = 2 + +[*.{yml,toml,json}] +indent_size = 2 diff --git a/.github/ISSUE_TEMPLATE/10-bug-report.yml b/.github/ISSUE_TEMPLATE/10-bug-report.yml new file mode 100644 index 00000000..a3a244c3 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/10-bug-report.yml @@ -0,0 +1,64 @@ +name: Bug Report +description: File a bug report related to running the package +title: "[Bug] " +labels: ["Type: bug"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this bug report! + + Please, before submitting, make sure that: + + - There is not an [existing issue](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/issues) with the same question + + - You have read the [contributing guide](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/90-contributing/) + + + + The form below should help you in filling out this issue. + - type: textarea + id: description + attributes: + label: Description + description: Describe the bug + validations: + required: true + - type: input + id: pkg-version + attributes: + label: Package Version + description: What version of the package are you running? + placeholder: In Julia, press ] to enter pkg mode, and then run `pkg> status` + validations: + required: true + - type: input + id: version + attributes: + label: Julia Version + description: What version of Julia are you running? + validations: + required: true + - type: textarea + id: reproduction + attributes: + label: Reproduction steps + description: What steps led to the bug happening? Please provide a minimal reproducible example. + validations: + required: true + - type: textarea + id: logs + attributes: + label: Relevant log output + description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. + render: shell + - type: dropdown + id: os + attributes: + label: "Operating System" + description: What is the impacted environment? 
+ multiple: true + options: + - Windows + - Linux + - Mac diff --git a/.github/ISSUE_TEMPLATE/20-feature-request.yml b/.github/ISSUE_TEMPLATE/20-feature-request.yml new file mode 100644 index 00000000..b88202ac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/20-feature-request.yml @@ -0,0 +1,44 @@ +name: "Feature Request" +description: Suggest a new feature for the package +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this feature request! + + Please, before submitting, make sure that: + + - There is not an [existing issue](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/issues) with the same question + + - You have read the [contributing guide](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/90-contributing/) + + + + The form below should help you in filling out this issue. + - type: textarea + id: description + attributes: + label: Description + description: Describe the requested feature + validations: + required: true + - type: textarea + id: validation + attributes: + label: Validation and testing + description: How could we verify that the new feature works? What kind of tests can be done? + - type: textarea + id: motivation + attributes: + label: Motivation + description: Explain why this feature is relevant + - type: textarea + id: target + attributes: + label: Target audience + description: Tell us more about the users of this feature, or where it could be useful + - type: textarea + id: can-help + attributes: + label: Can you help? + description: Can you help develop this feature? diff --git a/.github/ISSUE_TEMPLATE/30-usage.yml b/.github/ISSUE_TEMPLATE/30-usage.yml new file mode 100644 index 00000000..cca08eac --- /dev/null +++ b/.github/ISSUE_TEMPLATE/30-usage.yml @@ -0,0 +1,26 @@ +name: "Usage question" +description: Questions related to usage of the package +labels: ["documentation"] +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this question! + + Please, before submitting, make sure that: + + - You have checked the [documentation](https://TulipaEnergy.github.io/TulipaEnergyModel.jl) and haven't found enough information + - There is not an [existing issue](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/issues) with the same question + + - You have read the [contributing guide](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/90-contributing/) + + + + The form below should help you in filling out this issue. + - type: textarea + id: description + attributes: + label: Description + description: Write your question + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/40-epic.yml b/.github/ISSUE_TEMPLATE/40-epic.yml new file mode 100644 index 00000000..37cd8c54 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/40-epic.yml @@ -0,0 +1,23 @@ +name: "Epic issue" +description: Create a new Epic issue to collect smaller issues that are part of a larger feature or goal. +labels: ["Type: epic"] +body: + - type: textarea + id: description + attributes: + label: "Description" + description: Please enter an explicit description of the large feature or goal + validations: + required: true + - type: textarea + id: issues + attributes: + label: "Sub issues" + description: List all tasks that need to be accomplished and convert them to issues as you work on them + value: | + - [ ] + - [ ] + - [ ] + ...
+ validations: + required: false diff --git a/.github/ISSUE_TEMPLATE/99-general.yml b/.github/ISSUE_TEMPLATE/99-general.yml new file mode 100644 index 00000000..76a11902 --- /dev/null +++ b/.github/ISSUE_TEMPLATE/99-general.yml @@ -0,0 +1,24 @@ +name: "Other issues" +description: In case none of the other templates apply +body: + - type: markdown + attributes: + value: | + Thanks for taking the time to fill out this issue! + + Please, before submitting, make sure that: + + - There is not an [existing issue](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/issues) with the same question + + - You have read the [contributing guide](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/90-contributing/) + + + + The form below should help you in filling out this issue. + - type: textarea + id: description + attributes: + label: Description + description: Describe the issue + validations: + required: true diff --git a/.github/ISSUE_TEMPLATE/ADDITION-REQUEST.yml b/.github/ISSUE_TEMPLATE/ADDITION-REQUEST.yml deleted file mode 100644 index 78739235..00000000 --- a/.github/ISSUE_TEMPLATE/ADDITION-REQUEST.yml +++ /dev/null @@ -1,21 +0,0 @@ -name: "Addition Request" -description: Create a ticket to request a new addition (or feature) -title: "" -labels: ["Type: addition"] -body: - - type: textarea - id: description - attributes: - label: "Description" - description: Provide an explanation of the addition - placeholder: Describe your addition request - validations: - required: true - - type: textarea - id: related_issues - attributes: - label: "Related Issues" - description: Existing issues related to this addition - placeholder: "#Issues IDs" - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml b/.github/ISSUE_TEMPLATE/BUG-REPORT.yml deleted file mode 100644 index aeacf883..00000000 --- a/.github/ISSUE_TEMPLATE/BUG-REPORT.yml +++ /dev/null @@ -1,46 +0,0 @@ -name: "Bug Report" -description: Create a new ticket for a bug. -title: "[BUG] - <title>" -labels: ["Type: bug"] -body: - - type: textarea - id: description - attributes: - label: "Description" - description: Please enter an explicit description of your issue - placeholder: Short and explicit description of your incident... - validations: - required: true - - type: textarea - id: reprod - attributes: - label: "Reproduction steps" - description: Please describe steps to reproduce your issue - value: | - 1. Go to '...' - 2. Using input data '....' - 3. With function '....' - 4. [Specific error message] - render: bash - validations: - required: true - - type: textarea - id: logs - attributes: - label: "Logs" - description: Please copy and paste any relevant log output. This will be automatically formatted into code, so no need for backticks. - render: bash - validations: - required: false - - type: dropdown - id: os - attributes: - label: "OS" - description: What is the impacted environment ? - multiple: true - options: - - Windows - - Linux - - Mac - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/EPIC.yml b/.github/ISSUE_TEMPLATE/EPIC.yml deleted file mode 100644 index 5cab546d..00000000 --- a/.github/ISSUE_TEMPLATE/EPIC.yml +++ /dev/null @@ -1,25 +0,0 @@ -name: "EPIC" -description: Create a new Epic issue to collect smaller issues that are part of a larger feature or goal.
-title: "<title>" -labels: ["Type: epic"] -body: - - type: textarea - id: description - attributes: - label: "Description" - description: Please enter an explicit description of the large feature or goal - placeholder: Short and explicit description of the epic issue... - validations: - required: true - - type: textarea - id: issues - attributes: - label: "Sub issues" - description: List all sub issues belonging to this - value: | - - [ ] - - [ ] - - [ ] - ... - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/IMPROVEMENT-REQUEST.yml b/.github/ISSUE_TEMPLATE/IMPROVEMENT-REQUEST.yml deleted file mode 100644 index 91227a37..00000000 --- a/.github/ISSUE_TEMPLATE/IMPROVEMENT-REQUEST.yml +++ /dev/null @@ -1,29 +0,0 @@ -name: "Improvement Request" -description: Create a ticket to request an improvement of something that already exists. -title: "<title>" -labels: ["Type: improvement"] -body: - - type: textarea - id: summary - attributes: - label: "What and Why" - description: Explain the improvement - placeholder: Describe your request, including why it's an improvement - validations: - required: true - - type: textarea - id: drawbacks - attributes: - label: "Possible Drawbacks" - description: What are possible drawbacks of your improvement request? - placeholder: Identify the drawbacks, while remaining neutral - validations: - required: false - - type: textarea - id: related_issues - attributes: - label: "Related Issues" - description: Existing issues related to this improvement - placeholder: "#Issues IDs" - validations: - required: false diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index 3ba13e0c..8d0e85e3 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1 +1,5 @@ blank_issues_enabled: false +contact_links: + - name: Discussions + url: https://github.com/TulipaEnergy/TulipaEnergyModel.jl/discussions + about: Create and follow discussions here diff --git a/.github/PULL_REQUEST_TEMPLATE.md b/.github/PULL_REQUEST_TEMPLATE.md index 2f9f4e3b..f5c941ec 100644 --- a/.github/PULL_REQUEST_TEMPLATE.md +++ b/.github/PULL_REQUEST_TEMPLATE.md @@ -1,18 +1,31 @@ -# Pull request details +<!-- +Thanks for making a pull request to TulipaEnergyModel.jl. +We have added this PR template to help you help us. -## Describe the changes made in this pull request +Make sure to read the contributing guidelines. -<!-- include screenshots if that helps the review --> +See the comments below, fill the required fields, and check the items. +--> -## List of related issues or pull requests +## Related issues -Closes #ISSUE_NUMBER +<!-- We normally work with (i) create issue; (ii) discussion if necessary; (iii) create PR. So, at least one of the following should be true:--> -## Collaboration confirmation +<!-- Option 1, this closes an existing issue. Fill the number below--> +Closes # -As a contributor I confirm +<!-- Option 2, this is a small fix that arguably won't need an issue. Uncomment below --> +<!-- +There is no related issue. 
+--> -- [ ] I read and followed the instructions in README.dev.md -- [ ] The documentation is up to date with the changes introduced in this Pull Request (or NA) -- [ ] Tests are passing -- [ ] Lint is passing +## Checklist + +<!-- mark true if NA --> +<!-- leave PR as draft until all is checked --> + +- [ ] I am following the [contributing guidelines](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/docs/src/90-contributing.md) + +- [ ] Tests are passing +- [ ] Lint workflow is passing +- [ ] Docs were updated and workflow is passing diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 00000000..700707ce --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,7 @@ +# https://docs.github.com/github/administering-a-repository/configuration-options-for-dependency-updates +version: 2 +updates: + - package-ecosystem: "github-actions" + directory: "/" # Location of package manifests + schedule: + interval: "weekly" diff --git a/.github/workflows/Benchmark.yml b/.github/workflows/Benchmark.yml index e28774c4..7b3164b1 100644 --- a/.github/workflows/Benchmark.yml +++ b/.github/workflows/Benchmark.yml @@ -1,113 +1,113 @@ name: Run benchmarks on: - pull_request: - types: - - opened - - synchronize - - reopened - - labeled + pull_request: + types: + - opened + - synchronize + - reopened + - labeled permissions: - contents: write - issues: write - pull-requests: write # For writing the comment + contents: write + issues: write + pull-requests: write # For writing the comment concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} jobs: - benchmark: - # Only runs if the PR originates from the repo itself and if the label 'benchmark' is assigned - if: contains(github.event.pull_request.labels.*.name, 'benchmark') && github.event.pull_request.head.repo.full_name == github.repository - runs-on: ubuntu-latest - env: - BASE: ${{ github.event.pull_request.base.sha }} - HEAD: ${{ github.event.pull_request.head.sha }} - steps: - - name: Find Comment (before benchmarks) - uses: peter-evans/find-comment@v2 - id: fcbenchmark-early - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: "github-actions[bot]" - body-includes: Benchmark Results - - name: Early Comment on PR - uses: peter-evans/create-or-update-comment@v3 - with: - comment-id: ${{ steps.fcbenchmark-early.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body: | - ### Benchmark Results + benchmark: + # Only runs if the PR originates from the repo itself and if the label 'benchmark' is assigned + if: contains(github.event.pull_request.labels.*.name, 'benchmark') && github.event.pull_request.head.repo.full_name == github.repository + runs-on: ubuntu-latest + env: + BASE: ${{ github.event.pull_request.base.sha }} + HEAD: ${{ github.event.pull_request.head.sha }} + steps: + - name: Find Comment (before benchmarks) + uses: peter-evans/find-comment@v2 + id: fcbenchmark-early + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: "github-actions[bot]" + body-includes: Benchmark Results + - name: Early Comment on PR + uses: peter-evans/create-or-update-comment@v3 + with: + comment-id: ${{ steps.fcbenchmark-early.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body: | + ### Benchmark Results - Benchmark in progress... 
- edit-mode: replace - - uses: actions/checkout@v4 - - uses: julia-actions/setup-julia@latest - with: - version: 1 - - uses: julia-actions/cache@v1 - - name: Extract Package Name from Project.toml - id: extract-package-name - run: | - PACKAGE_NAME=$(grep "^name" Project.toml | sed 's/^name = "\(.*\)"$/\1/') - echo "::set-output name=package_name::$PACKAGE_NAME" - - name: Build AirspeedVelocity - env: - JULIA_NUM_THREADS: 2 - run: | - # Lightweight build step, as sometimes the runner runs out of memory: - julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.add(;url="https://github.com/MilesCranmer/AirspeedVelocity.jl.git")' - julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.build("AirspeedVelocity")' - - name: Add ~/.julia/bin to PATH - run: | - echo "$HOME/.julia/bin" >> $GITHUB_PATH - - name: Run benchmarks - run: | - echo $PATH - ls -l ~/.julia/bin - mkdir results - benchpkg ${{ steps.extract-package-name.outputs.package_name }} \ - --rev="$BASE,$HEAD" \ - --url=${{ github.event.repository.clone_url }} \ - --bench-on="$HEAD" \ - --output-dir=results/ - - name: Create plots from benchmarks - run: | - mkdir -p plots - benchpkgplot ${{ steps.extract-package-name.outputs.package_name }} \ - --rev="$BASE,$HEAD" \ - --npart=10 --format=png --input-dir=results/ --output-dir=plots/ - - name: Upload plot as artifact - uses: actions/upload-artifact@v4 - with: - name: plots - path: plots - - name: Create markdown table from benchmarks - run: | - benchpkgtable ${{ steps.extract-package-name.outputs.package_name }} \ - --rev="$BASE,$HEAD" \ - --mode=time,memory \ - --input-dir=results/ --ratio > table.md - echo '### Benchmark Results' > body.md - echo '' >> body.md - echo '' >> body.md - cat table.md >> body.md - echo '' >> body.md - echo '' >> body.md - echo '### Benchmark Plots' >> body.md - echo 'A plot of the benchmark results have been uploaded as an artifact to the workflow run for this PR.' >> body.md - echo 'Go to "Actions"->"Benchmark a pull request"->[the most recent run]->"Artifacts" (at the bottom).' >> body.md - - name: Find Comment - uses: peter-evans/find-comment@v2 - id: fcbenchmark - with: - issue-number: ${{ github.event.pull_request.number }} - comment-author: "github-actions[bot]" - body-includes: Benchmark Results - - name: Comment on PR - uses: peter-evans/create-or-update-comment@v3 - with: - comment-id: ${{ steps.fcbenchmark.outputs.comment-id }} - issue-number: ${{ github.event.pull_request.number }} - body-path: body.md - edit-mode: replace + Benchmark in progress... 
+ edit-mode: replace + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@latest + with: + version: 1 + - uses: julia-actions/cache@v1 + - name: Extract Package Name from Project.toml + id: extract-package-name + run: | + PACKAGE_NAME=$(grep "^name" Project.toml | sed 's/^name = "\(.*\)"$/\1/') + echo "::set-output name=package_name::$PACKAGE_NAME" + - name: Build AirspeedVelocity + env: + JULIA_NUM_THREADS: 2 + run: | + # Lightweight build step, as sometimes the runner runs out of memory: + julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.add(;url="https://github.com/MilesCranmer/AirspeedVelocity.jl.git")' + julia -e 'ENV["JULIA_PKG_PRECOMPILE_AUTO"]=0; import Pkg; Pkg.build("AirspeedVelocity")' + - name: Add ~/.julia/bin to PATH + run: | + echo "$HOME/.julia/bin" >> $GITHUB_PATH + - name: Run benchmarks + run: | + echo $PATH + ls -l ~/.julia/bin + mkdir results + benchpkg ${{ steps.extract-package-name.outputs.package_name }} \ + --rev="$BASE,$HEAD" \ + --url=${{ github.event.repository.clone_url }} \ + --bench-on="$HEAD" \ + --output-dir=results/ + - name: Create plots from benchmarks + run: | + mkdir -p plots + benchpkgplot ${{ steps.extract-package-name.outputs.package_name }} \ + --rev="$BASE,$HEAD" \ + --npart=10 --format=png --input-dir=results/ --output-dir=plots/ + - name: Upload plot as artifact + uses: actions/upload-artifact@v4 + with: + name: plots + path: plots + - name: Create markdown table from benchmarks + run: | + benchpkgtable ${{ steps.extract-package-name.outputs.package_name }} \ + --rev="$BASE,$HEAD" \ + --mode=time,memory \ + --input-dir=results/ --ratio > table.md + echo '### Benchmark Results' > body.md + echo '' >> body.md + echo '' >> body.md + cat table.md >> body.md + echo '' >> body.md + echo '' >> body.md + echo '### Benchmark Plots' >> body.md + echo 'A plot of the benchmark results have been uploaded as an artifact to the workflow run for this PR.' >> body.md + echo 'Go to "Actions"->"Benchmark a pull request"->[the most recent run]->"Artifacts" (at the bottom).' 
>> body.md + - name: Find Comment + uses: peter-evans/find-comment@v2 + id: fcbenchmark + with: + issue-number: ${{ github.event.pull_request.number }} + comment-author: "github-actions[bot]" + body-includes: Benchmark Results + - name: Comment on PR + uses: peter-evans/create-or-update-comment@v3 + with: + comment-id: ${{ steps.fcbenchmark.outputs.comment-id }} + issue-number: ${{ github.event.pull_request.number }} + body-path: body.md + edit-mode: replace diff --git a/.github/workflows/CompatHelper.yml b/.github/workflows/CompatHelper.yml index 185d3610..210e56f0 100644 --- a/.github/workflows/CompatHelper.yml +++ b/.github/workflows/CompatHelper.yml @@ -1,45 +1,51 @@ +# CompatHelper v3.5.0 name: CompatHelper + on: - schedule: - - cron: 0 0 1/7 * * # once a week - workflow_dispatch: + schedule: + - cron: 0 0 * * * # Every day at 00:00 UTC + workflow_dispatch: + permissions: - contents: write - pull-requests: write + contents: write + pull-requests: write + jobs: - CompatHelper: - runs-on: ubuntu-latest - steps: - - name: Check if Julia is already available in the PATH - id: julia_in_path - run: which julia - continue-on-error: true - - name: Install Julia, but only if it is not already available in the PATH - uses: julia-actions/setup-julia@v1 - with: - version: "1" - arch: ${{ runner.arch }} - if: steps.julia_in_path.outcome != 'success' - - name: "Add the General registry via Git" - run: | - import Pkg - ENV["JULIA_PKG_SERVER"] = "" - Pkg.Registry.add("General") - shell: julia --color=yes {0} - - name: "Install CompatHelper" - run: | - import Pkg - name = "CompatHelper" - uuid = "aa819f21-2bde-4658-8897-bab36330d9b7" - version = "3" - Pkg.add(; name, uuid, version) - shell: julia --color=yes {0} - - name: "Run CompatHelper" - run: | - import CompatHelper - CompatHelper.main() - shell: julia --color=yes {0} - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }} - # COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} + CompatHelper: + runs-on: ubuntu-latest + steps: + - name: Check if Julia is already available in the PATH + id: julia_in_path + run: which julia + continue-on-error: true + - name: Install Julia, but only if it is not already available in the PATH + uses: julia-actions/setup-julia@v2 + with: + version: "1" + arch: ${{ runner.arch }} + if: steps.julia_in_path.outcome != 'success' + - name: Use Julia cache + uses: julia-actions/cache@v2 + - name: "Add the General registry via Git" + run: | + import Pkg + ENV["JULIA_PKG_SERVER"] = "" + Pkg.Registry.add("General") + shell: julia --color=yes {0} + - name: "Install CompatHelper" + run: | + import Pkg + name = "CompatHelper" + uuid = "aa819f21-2bde-4658-8897-bab36330d9b7" + version = "3" + Pkg.add(; name, uuid, version) + shell: julia --color=yes {0} + - name: "Run CompatHelper" + run: | + import CompatHelper + CompatHelper.main() + shell: julia --color=yes {0} + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + COMPATHELPER_PRIV: ${{ secrets.DOCUMENTER_KEY }} + # COMPATHELPER_PRIV: ${{ secrets.COMPATHELPER_PRIV }} diff --git a/.github/workflows/Docs.yml b/.github/workflows/Docs.yml index 20309883..41d01cca 100644 --- a/.github/workflows/Docs.yml +++ b/.github/workflows/Docs.yml @@ -1,51 +1,57 @@ name: Docs + on: - push: - branches: - - main - paths: - - "docs/**" - - "src/**" - - "*.toml" - tags: ["*"] - pull_request: - paths: - - "docs/**" - - "src/**" - - "*.toml" + push: + branches: + - main + paths: + - "docs/**" + - "src/**" + - "*.toml" + tags: ["*"] + pull_request: + 
branches: + - main + paths: + - "docs/**" + - "src/**" + - "*.toml" + types: [opened, synchronize, reopened] + concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} -permissions: - contents: read + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + jobs: - docs: - name: Documentation - runs-on: ubuntu-latest - permissions: - contents: write - statuses: write - steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 - with: - version: "1" - - name: Configure doc environment - run: | - julia --project=docs/ -e ' - using Pkg - Pkg.develop(PackageSpec(path=pwd())) - Pkg.instantiate()' - - uses: julia-actions/julia-buildpkg@v1 - - uses: julia-actions/julia-docdeploy@v1 - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GKSwstype: "100" # https://discourse.julialang.org/t/generation-of-documentation-fails-qt-qpa-xcb-could-not-connect-to-display/60988 - - run: | - julia --project=docs -e ' - using Documenter: DocMeta, doctest - using TulipaEnergyModel - DocMeta.setdocmeta!(TulipaEnergyModel, :DocTestSetup, :(using TulipaEnergyModel); recursive=true) - doctest(TulipaEnergyModel)' + docs: + name: Documentation + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 + with: + version: "1" + - name: Use Julia cache + uses: julia-actions/cache@v2 + - name: Instantiate environment with development version of the package + run: | + julia --project=docs -e ' + using Pkg + Pkg.develop(PackageSpec(path=pwd())) + Pkg.instantiate()' + - name: Run doctest + run: | + julia --project=docs -e ' + using Documenter: DocMeta, doctest + using TulipaEnergyModel + DocMeta.setdocmeta!(TulipaEnergyModel, :DocTestSetup, :(using TulipaEnergyModel); recursive=true) + doctest(TulipaEnergyModel)' + - name: Generate and deploy documentation + run: julia --project=docs docs/make.jl + env: + JULIA_PKG_SERVER: "" + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + DOCUMENTER_KEY: ${{ secrets.DOCUMENTER_KEY }} + GKSwstype: "100" # https://discourse.julialang.org/t/generation-of-documentation-fails-qt-qpa-xcb-could-not-connect-to-display/60988 diff --git a/.github/workflows/Lint.yml b/.github/workflows/Lint.yml index a51190b1..f88abf40 100644 --- a/.github/workflows/Lint.yml +++ b/.github/workflows/Lint.yml @@ -1,35 +1,61 @@ name: Lint + on: - push: - branches: - - main - tags: ["*"] - pull_request: + push: + branches: + - main + tags: ["*"] + pull_request: + concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} -permissions: - contents: read + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + jobs: - lint: - name: Linting - runs-on: ubuntu-latest - steps: - - name: Clone - uses: actions/checkout@v3 - - name: Setup Julia - uses: julia-actions/setup-julia@v1 - with: - version: "1" - - name: Use Julia cache - uses: julia-actions/cache@v1 - - name: Install JuliaFormatter.jl - run: julia -e 'using Pkg; pkg"add JuliaFormatter"' - - name: Setup Python - uses: actions/setup-python@v4 - - name: Install pre-commit - run: pip install pre-commit - - name: Run pre-commit - run: SKIP=no-commit-to-branch pre-commit run -a + lint: + name: Linting + runs-on: ubuntu-latest + steps: + - name: Clone + uses: actions/checkout@v4 + - name: Setup Julia + uses: julia-actions/setup-julia@v2 + with: + version: "1" + - name: Use Julia cache + uses: julia-actions/cache@v2 + - name: Install JuliaFormatter.jl + run: julia -e 'using Pkg; pkg"add JuliaFormatter"' + - name: Hack for setup-python cache # https://github.com/actions/setup-python/issues/807 + run: touch requirements.txt + - name: Setup Python + uses: actions/setup-python@v5 + with: + cache: "pip" + python-version: "3.11" + - name: Hack for setup-python cache # https://github.com/actions/setup-python/issues/807 + run: rm requirements.txt + - name: Cache pre-commit + uses: actions/cache@v4 + with: + path: ~/.cache/pre-commit + key: ${{ runner.os }}-pre-commit-${{ hashFiles('**/.pre-commit-config.yaml') }} + - name: Install pre-commit + run: pip install pre-commit + - name: Run pre-commit + run: SKIP=no-commit-to-branch pre-commit run -a + + link-checker: + name: Link checker + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Link Checker + id: lychee + uses: lycheeverse/lychee-action@v1 + with: + fail: true + args: --config '.lychee.toml' . 
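The Lint workflow above installs JuliaFormatter.jl and runs the pre-commit hooks (skipping only `no-commit-to-branch`). A minimal sketch of reproducing the formatting step locally is shown below; it assumes JuliaFormatter.jl is installed in the active Julia environment and that the repository root (where `.JuliaFormatter.toml` lives) is the current working directory.

```julia
using JuliaFormatter

# Format the repository in place, picking up the settings from .JuliaFormatter.toml.
# `format` returns true when every file was already formatted, which is effectively
# what the Lint workflow verifies in CI.
already_formatted = format(".")
println(already_formatted ? "Formatting is clean" : "Some files were reformatted")
```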
diff --git a/.github/workflows/PreCommitUpdate.yml b/.github/workflows/PreCommitUpdate.yml new file mode 100644 index 00000000..e5318c55 --- /dev/null +++ b/.github/workflows/PreCommitUpdate.yml @@ -0,0 +1,40 @@ +name: pre-commit Update + +on: + schedule: + - cron: "0 7 1/7 * *" # At 7:00 every 7 days + workflow_dispatch: + +jobs: + pre-commit-update: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + - name: Hack for setup-python cache # https://github.com/actions/setup-python/issues/807 + run: touch requirements.txt + - name: Setup Python + uses: actions/setup-python@v5 + with: + cache: pip + python-version: "3.11" + - name: Hack for setup-python cache # https://github.com/actions/setup-python/issues/807 + run: rm requirements.txt + - name: Install pre-commit + run: pip install pre-commit + - name: Run pre-commit's autoupdate + run: | + # ignore exit code + pre-commit autoupdate || true + - name: Create Pull Request + id: cpr + uses: peter-evans/create-pull-request@v7 + with: + commit-message: "chore: :robot: pre-commit update" + title: "[AUTO] pre-commit update" + branch: auto-pre-commit-update + delete-branch: true + labels: chore + - name: Check outputs + run: | + echo "Pull Request Number - ${{ steps.cpr.outputs.pull-request-number }}" + echo "Pull Request URL - ${{ steps.cpr.outputs.pull-request-url }}" diff --git a/.github/workflows/ReusableTest.yml b/.github/workflows/ReusableTest.yml new file mode 100644 index 00000000..12431f28 --- /dev/null +++ b/.github/workflows/ReusableTest.yml @@ -0,0 +1,52 @@ +name: Reusable test + +on: + workflow_call: + inputs: + version: + required: false + type: string + default: "1" + os: + required: false + type: string + default: ubuntu-latest + arch: + required: false + type: string + default: x64 + allow_failure: + required: false + type: boolean + default: false + run_codecov: + required: false + type: boolean + default: false + secrets: + codecov_token: + required: true + +jobs: + test: + name: Julia ${{ inputs.version }} - ${{ inputs.os }} - ${{ inputs.arch }} - ${{ github.event_name }} + runs-on: ${{ inputs.os }} + continue-on-error: ${{ inputs.allow_failure }} + + steps: + - uses: actions/checkout@v4 + - uses: julia-actions/setup-julia@v2 + with: + version: ${{ inputs.version }} + arch: ${{ inputs.arch }} + - name: Use Julia cache + uses: julia-actions/cache@v2 + - uses: julia-actions/julia-buildpkg@v1 + - uses: julia-actions/julia-runtest@v1 + - uses: julia-actions/julia-processcoverage@v1 + if: ${{ inputs.run_codecov }} + - uses: codecov/codecov-action@v4 + if: ${{ inputs.run_codecov }} + with: + file: lcov.info + token: ${{ secrets.codecov_token }} diff --git a/.github/workflows/Stale.yml b/.github/workflows/Stale.yml index b677751b..b6955312 100644 --- a/.github/workflows/Stale.yml +++ b/.github/workflows/Stale.yml @@ -1,20 +1,20 @@ name: Close Stale Issues on: - schedule: - - cron: 30 1 * * * + schedule: + - cron: 30 1 * * * permissions: - contents: read + contents: read jobs: - stale: - runs-on: ubuntu-latest - steps: - - uses: actions/stale@v4.1.1 - with: - stale-issue-message: This issue is stale because it has been open 2 months with no activity. Remove the stale label or comment, otherwise this will be closed in 5 days. - stale-pr-message: This PR is stale because it has been open 1 month with no activity. Remove stale label or comment to revive it. 
- stale-issue-label: stale - exempt-issue-labels: "Type: bug" - days-before-issue-stale: 60 - days-before-pr-stale: 30 - days-before-close: -1 - days-before-pr-close: -1 + stale: + runs-on: ubuntu-latest + steps: + - uses: actions/stale@v4.1.1 + with: + stale-issue-message: This issue is stale because it has been open 2 months with no activity. Remove the stale label or comment, otherwise this will be closed in 5 days. + stale-pr-message: This PR is stale because it has been open 1 month with no activity. Remove stale label or comment to revive it. + stale-issue-label: stale + exempt-issue-labels: "Type: bug" + days-before-issue-stale: 60 + days-before-pr-stale: 30 + days-before-close: -1 + days-before-pr-close: -1 diff --git a/.github/workflows/TagBot.yml b/.github/workflows/TagBot.yml index 2e026645..dab9b0b6 100644 --- a/.github/workflows/TagBot.yml +++ b/.github/workflows/TagBot.yml @@ -1,31 +1,37 @@ name: TagBot + on: - issue_comment: - types: - - created - workflow_dispatch: - inputs: - lookback: - default: "3" + issue_comment: + types: + - created + workflow_dispatch: + inputs: + lookback: + type: number + default: 3 + permissions: - actions: read - checks: read - contents: write - deployments: read - issues: read - discussions: read - packages: read - pages: read - pull-requests: read - repository-projects: read - security-events: read - statuses: read + actions: read + checks: read + contents: write + deployments: read + issues: read + discussions: read + packages: read + pages: read + pull-requests: read + repository-projects: read + security-events: read + statuses: read + jobs: - TagBot: - if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot' - runs-on: ubuntu-latest - steps: - - uses: JuliaRegistries/TagBot@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - ssh: ${{ secrets.DOCUMENTER_KEY }} + TagBot: + if: github.event_name == 'workflow_dispatch' || github.actor == 'JuliaTagBot' + runs-on: ubuntu-latest + steps: + - uses: JuliaRegistries/TagBot@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + # Edit the following line to reflect the actual name of the GitHub Secret containing your private key + ssh: ${{ secrets.DOCUMENTER_KEY }} + # ssh: ${{ secrets.NAME_OF_MY_SSH_PRIVATE_KEY_SECRET }} diff --git a/.github/workflows/Test.yml b/.github/workflows/Test.yml index 7c21f75e..bb4e024a 100644 --- a/.github/workflows/Test.yml +++ b/.github/workflows/Test.yml @@ -1,55 +1,33 @@ name: Test + on: - push: - branches: - - main - paths: - - "src/**" - - "test/**" - - "*.toml" - tags: ["*"] - # These lines were commented out because of issue #133 (run tests faster on PRs) - # pull_request: - # paths: - # - "src/**" - # - "test/**" - # - "*.toml" -concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. 
- group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} -permissions: - contents: read + push: + branches: + - main + tags: ["*"] + workflow_dispatch: + jobs: - test: - name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - version: - - "1.6" - - "1" - os: - - ubuntu-latest - - macos-latest - - windows-latest - arch: - - x64 - steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 - with: - version: ${{ matrix.version }} - arch: ${{ matrix.arch }} - - uses: julia-actions/cache@v1 - - uses: julia-actions/julia-buildpkg@v1 - - uses: julia-actions/julia-runtest@v1 - - uses: julia-actions/julia-processcoverage@v1 - if: matrix.version == '1' && matrix.os == 'ubuntu-latest' - - uses: codecov/codecov-action@v4 - if: matrix.version == '1' && matrix.os == 'ubuntu-latest' - with: - files: lcov.info - token: ${{ secrets.CODECOV_TOKEN }} # Defined in the organization secrets + test: + uses: ./.github/workflows/ReusableTest.yml + with: + os: ${{ matrix.os }} + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} + allow_failure: ${{ matrix.allow_failure }} + run_codecov: ${{ matrix.version == '1' && matrix.os == 'ubuntu-latest' }} + secrets: + codecov_token: ${{ secrets.CODECOV_TOKEN }} + strategy: + fail-fast: false + matrix: + version: + - "lts" + - "1" + os: + - ubuntu-latest + - macOS-latest + - windows-latest + arch: + - x64 + allow_failure: [false] diff --git a/.github/workflows/TestOnPR.yml b/.github/workflows/TestOnPR.yml index 47b4a402..934245b8 100644 --- a/.github/workflows/TestOnPR.yml +++ b/.github/workflows/TestOnPR.yml @@ -2,45 +2,45 @@ # If you want to run the full test, change Test.yml and delete this file name: Test on PR on: - pull_request: - paths: - - "src/**" - - "test/**" - - "*.toml" + pull_request: + paths: + - "src/**" + - "test/**" + - "*.toml" concurrency: - # Skip intermediate builds: always. - # Cancel intermediate builds: only if it is a pull request build. - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} permissions: - contents: read + contents: read jobs: - test: - name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }} - runs-on: ${{ matrix.os }} - strategy: - fail-fast: false - matrix: - version: - - "1.6" - - "1" - os: - - ubuntu-latest - arch: - - x64 - steps: - - uses: actions/checkout@v3 - - uses: julia-actions/setup-julia@v1 - with: - version: ${{ matrix.version }} - arch: ${{ matrix.arch }} - - uses: julia-actions/cache@v1 - - uses: julia-actions/julia-buildpkg@v1 - - uses: julia-actions/julia-runtest@v1 - - uses: julia-actions/julia-processcoverage@v1 - if: matrix.version == '1' && matrix.os == 'ubuntu-latest' - - uses: codecov/codecov-action@v4 - if: matrix.version == '1' && matrix.os == 'ubuntu-latest' - with: - files: lcov.info - token: ${{ secrets.CODECOV_TOKEN }} # Defined in the organization secrets + test: + name: Julia ${{ matrix.version }} - ${{ matrix.os }} - ${{ matrix.arch }} - ${{ github.event_name }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + version: + - "1.6" + - "1" + os: + - ubuntu-latest + arch: + - x64 + steps: + - uses: actions/checkout@v3 + - uses: julia-actions/setup-julia@v1 + with: + version: ${{ matrix.version }} + arch: ${{ matrix.arch }} + - uses: julia-actions/cache@v1 + - uses: julia-actions/julia-buildpkg@v1 + - uses: julia-actions/julia-runtest@v1 + - uses: julia-actions/julia-processcoverage@v1 + if: matrix.version == '1' && matrix.os == 'ubuntu-latest' + - uses: codecov/codecov-action@v4 + if: matrix.version == '1' && matrix.os == 'ubuntu-latest' + with: + files: lcov.info + token: ${{ secrets.CODECOV_TOKEN }} # Defined in the organization secrets diff --git a/.github/workflows/TestOnPRs.yml b/.github/workflows/TestOnPRs.yml new file mode 100644 index 00000000..19a1f5a8 --- /dev/null +++ b/.github/workflows/TestOnPRs.yml @@ -0,0 +1,29 @@ +name: Test on PRs + +on: + pull_request: + branches: + - main + paths: + - "src/**" + - "test/**" + - "*.toml" + types: [opened, synchronize, reopened] + +concurrency: + # Skip intermediate builds: always. + # Cancel intermediate builds: only if it is a pull request build. 
+ group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: ${{ startsWith(github.ref, 'refs/pull/') }} + +jobs: + test: + uses: ./.github/workflows/ReusableTest.yml + with: + os: ubuntu-latest + version: "1" + arch: x64 + allow_failure: false + run_codecov: true + secrets: + codecov_token: ${{ secrets.CODECOV_TOKEN }} diff --git a/.gitignore b/.gitignore index 0f9bfad9..754b4470 100644 --- a/.gitignore +++ b/.gitignore @@ -3,12 +3,15 @@ *.jl.mem *.log *.lp +*.rej +.DS_Store .benchmarkci +.vscode Manifest.toml benchmark/*.json -coverage/ -debugging/ -docs/build/ +coverage +debugging +docs/build env +node_modules test/outputs -.vscode/ diff --git a/.lychee.toml b/.lychee.toml new file mode 100644 index 00000000..f0d06421 --- /dev/null +++ b/.lychee.toml @@ -0,0 +1,9 @@ +exclude = [ + "@ref", + "^https://github.com/.*/releases/tag/v.*$", + "^https://code.visualstudio.com$", +] + +exclude_path = [ + "docs/build" +] diff --git a/.markdown-link-check.json b/.markdown-link-check.json deleted file mode 100644 index 39bac172..00000000 --- a/.markdown-link-check.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "ignorePatterns": [ - { - "pattern": "^@ref$" - } - ] -} diff --git a/.markdownlint.json b/.markdownlint.json new file mode 100644 index 00000000..8bf958ad --- /dev/null +++ b/.markdownlint.json @@ -0,0 +1,13 @@ +{ + "MD007": { + "indent": 2, + "start_indented": false + }, + "MD013": { + "line_length": 1000, + "tables": false + }, + "MD033": false, + "MD041": false, + "default": true +} diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9bf764f9..b1622b42 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,35 +1,49 @@ repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.4.0 - hooks: - - id: check-json - - id: check-toml - - id: check-yaml - - id: end-of-file-fixer - - id: file-contents-sorter - files: .JuliaFormatter.toml - args: [--unique] - - id: mixed-line-ending - args: [--fix=lf] - - id: no-commit-to-branch - - id: pretty-format-json - args: [--autofix, --indent=4] - - id: trailing-whitespace - - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v3.0.2" # Use the sha or tag you want to point at - hooks: - - id: prettier - types_or: [yaml, json, markdown] - - repo: https://github.com/adrienverge/yamllint - rev: v1.32.0 - hooks: - - id: yamllint - - repo: https://github.com/domluna/JuliaFormatter.jl - rev: v1.0.35 - hooks: - - id: julia-formatter - - repo: https://github.com/tcort/markdown-link-check - rev: v3.11.2 - hooks: - - id: markdown-link-check - args: [--config, .markdown-link-check.json] + - repo: local + hooks: + # Prevent committing .rej files + - id: forbidden-files + name: forbidden files + entry: found Copier update rejection files; review them and remove them + language: fail + files: "\\.rej$" + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.6.0 + hooks: + - id: check-json + - id: check-toml + - id: check-yaml + - id: end-of-file-fixer + - id: file-contents-sorter + files: .JuliaFormatter.toml + args: [--unique] + - id: mixed-line-ending + args: [--fix=lf] + - id: no-commit-to-branch + - id: pretty-format-json + args: [--autofix, --indent=2] + - id: trailing-whitespace + - id: check-merge-conflict + args: [--assume-in-merge] + - repo: https://github.com/igorshubovych/markdownlint-cli + rev: v0.42.0 + hooks: + - id: markdownlint-fix + - repo: https://github.com/citation-file-format/cffconvert + rev: 054bda51dbe278b3e86f27c890e3f3ac877d616c + hooks: + - id: validate-cff + - repo: 
https://github.com/pre-commit/mirrors-prettier + rev: "v4.0.0-alpha.8" # Use the sha or tag you want to point at + hooks: + - id: prettier + types_or: [yaml, json] + exclude: ".copier-answers.yml" + - repo: https://github.com/adrienverge/yamllint + rev: v1.35.1 + hooks: + - id: yamllint + - repo: https://github.com/domluna/JuliaFormatter.jl + rev: v1.0.60 + hooks: + - id: julia-formatter diff --git a/.yamllint.yml b/.yamllint.yml index ffeab4ba..5e16e5fb 100644 --- a/.yamllint.yml +++ b/.yamllint.yml @@ -1,2 +1,2 @@ rules: - indentation: { spaces: 4 } + indentation: { spaces: 2 } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md deleted file mode 100644 index 30764884..00000000 --- a/CONTRIBUTING.md +++ /dev/null @@ -1,30 +0,0 @@ -# Contributing Guidelines - -Great that you want to contribute to the development of Tulipa! Please read these guidelines and our [README.dev](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/README.dev.md) to get you started. - -## GitHub Rules of Engagement - -- If you want to discuss something that isn't immediately actionable, post under Discussions. Convert it to an issue once it's actionable. -- All PR's should have an associated issue (unless it's a very minor fix). -- All issues should have 1 Type and 1+ Zone labels (unless Type: epic). -- Assign yourself to issues you want to address. Consider if you will be able to work on them in the near future (this week) — if not, leave them available for someone else. -- Set the issue Status to "In Progress" when you have started working on it. -- When finalizing a pull request, set the Status to "Ready for Review." If someone specific needs to review it, assign them as the reviewer (otherwise anyone can review). -- Issues addressed by merged PRs will automatically move to Done. -- If you want to discuss an issue at the next group meeting (or just get some attention), mark it with the "question" label. -- Issues without updates for 60 days (and PRs without updates in 30 days) will be labelled as "stale" and filtered out of view. There is a Stale project board to view and revive these. - -## Contributing Workflow - -Fork → Branch → Code → Push → Pull → Squash & Merge - -1. Fork the repository -2. Create a new branch (in your fork) -3. Do fantastic coding -4. Push to your fork -5. Create a pull request from your fork to the main repository -6. (After review) Squash and merge - -For a step-by-step guide to these steps, see our [README.dev.md](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/README.dev.md). - -We use this workflow in our quest to achieve the [Utopic Git History](https://blog.esciencecenter.nl/the-utopic-git-history-d44b81c09593). 
diff --git a/Project.toml b/Project.toml index 4f9e63a4..bc038799 100644 --- a/Project.toml +++ b/Project.toml @@ -37,7 +37,7 @@ Statistics = "1" TOML = "1" TimerOutputs = "0.5" TulipaIO = "0.1, 0.2" -julia = "1.6" +julia = "1.10" [extras] Test = "8dfed614-e22c-5e08-85e1-65c5234f0b40" diff --git a/README.md b/README.md index 470d9499..8e1c456a 100644 --- a/README.md +++ b/README.md @@ -1,18 +1,23 @@ # TulipaEnergyModel -[![Stable doc](https://img.shields.io/badge/docs-stable-blue.svg)](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/stable/) -[![Dev doc](https://img.shields.io/badge/docs-dev-blue.svg)](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/) +[![Stable Documentation](https://img.shields.io/badge/docs-stable-blue.svg)](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/stable) +[![In development documentation](https://img.shields.io/badge/docs-dev-blue.svg)](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev) +[![Build Status](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/workflows/Test/badge.svg)](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions) [![Test workflow status](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Test.yml/badge.svg?branch=main)](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Test.yml?query=branch%3Amain) [![Lint workflow Status](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Lint.yml/badge.svg?branch=main)](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Lint.yml?query=branch%3Amain) [![Docs workflow Status](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Docs.yml/badge.svg?branch=main)](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/actions/workflows/Docs.yml?query=branch%3Amain) + [![Coverage](https://codecov.io/gh/TulipaEnergy/TulipaEnergyModel.jl/branch/main/graph/badge.svg)](https://codecov.io/gh/TulipaEnergy/TulipaEnergyModel.jl) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8363262.svg)](https://doi.org/10.5281/zenodo.8363262) +[![All Contributors](https://img.shields.io/github/all-contributors/TulipaEnergy/TulipaEnergyModel.jl?labelColor=5e1ec7&color=c0ffee&style=flat-square)](#contributors) +[![BestieTemplate](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/JuliaBesties/BestieTemplate.jl/main/docs/src/assets/badge.json)](https://github.com/JuliaBesties/BestieTemplate.jl) + This package provides an optimization model for the electricity market and its coupling with other energy sectors (e.g., hydrogen, heat, natural gas, etc.). The main objective is to determine the optimal investment and operation decisions for different types of assets (e.g., producers, consumers, conversions, storages, and transports). ## How to Cite -If you use TulipaEnergyModel.jl in your work, please cite using the format given in [CITATION.cff](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/CITATION.cff). +If you use TulipaEnergyModel.jl in your work, please cite using the reference given in [CITATION.cff](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/CITATION.cff). ## Installation @@ -29,10 +34,23 @@ Focused suggestions and requests can also be opened as issues. Before opening a If you want to ask a question unsuitable for a bug report, start a discussion [here](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/discussions). This forum is for general discussion about the repository [TulipaEnergyModel](https://github.com/TulipaEnergy/TulipaEnergyModel.jl). 
-## Contribution - -If you want to contribute to the package, please read our [CONTRIBUTING.md](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/CONTRIBUTING.md) and follow the guidelines in the [README.dev.md](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/main/README.dev.md) file. - ## License This content is released under the [Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0) License. + +## Contributing + +If you want to make contributions of any kind, please first take a look at our [contributing guide directly on GitHub](docs/src/90-contributing.md) or the [contributing page on the website](https://TulipaEnergy.github.io/TulipaEnergyModel.jl/dev/90-contributing/). + +--- + +### Contributors + +<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section --> +<!-- prettier-ignore-start --> +<!-- markdownlint-disable --> + +<!-- markdownlint-restore --> +<!-- prettier-ignore-end --> + +<!-- ALL-CONTRIBUTORS-LIST:END --> diff --git a/docs/Project.toml b/docs/Project.toml index 905a9e66..935e29bf 100644 --- a/docs/Project.toml +++ b/docs/Project.toml @@ -6,6 +6,7 @@ DuckDB = "d2f5444f-75bc-4fdf-ac35-56f514c445e1" GLPK = "60bf3e95-4087-53dc-ae20-288a0d20c6a6" HiGHS = "87dc4568-4c63-4d18-b0c0-bb2238e4078b" JuMP = "4076af6c-e467-56ae-b986-b466b2749572" +LiveServer = "16fef848-5104-11e9-1b77-fb7a48bbb589" MetaGraphsNext = "fa8bd995-216d-47f1-8a91-f3b68fbeb377" TulipaEnergyModel = "5d7bd171-d18e-45a5-9111-f1f11ac5d04d" TulipaIO = "7b3808b7-0819-42d4-885c-978ba173db11" diff --git a/docs/make.jl b/docs/make.jl index 1e73ebf9..73625083 100644 --- a/docs/make.jl +++ b/docs/make.jl @@ -3,26 +3,19 @@ using Documenter DocMeta.setdocmeta!(TulipaEnergyModel, :DocTestSetup, :(using TulipaEnergyModel); recursive = true) +const page_rename = Dict("developer.md" => "Developer docs") # Without the numbers +const numbered_pages = [ + file for file in readdir(joinpath(@__DIR__, "src")) if + file != "index.md" && splitext(file)[2] == ".md" +] + makedocs(; modules = [TulipaEnergyModel], - repo = "https://github.com/TulipaEnergy/TulipaEnergyModel.jl.git", - authors = "Abel Soares Siqueira <abel.s.siqueira@gmail.com> and contributors", + authors = "Diego A.
Tejada-Arango <diego.tejadaarango@tno.nl>,Germán Morales-España <german.morales@tno.nl>,Lauren Clisby <lauren.clisby@tno.nl>,Ni Wang <ni.wang@tno.nl>,Abel Soares Siqueira <abel.s.siqueira@gmail.com>,Suvayu Ali <s.ali@esciencecenter.nl>,Laurent Soucasse <l.soucasse@esciencecenter.nl>,Greg Neustroev <G.Neustroev@tudelft.nl>", + repo = "https://github.com/TulipaEnergy/TulipaEnergyModel.jl/blob/{commit}{path}#{line}", sitename = "TulipaEnergyModel.jl", - format = Documenter.HTML(; - prettyurls = get(ENV, "CI", "false") == "true", - canonical = "https://TulipaEnergy.github.io/TulipaEnergyModel.jl", - edit_link = "main", - assets = ["assets/style.css"], - ), - pages = [ - "Home" => "index.md", - "How to Use" => "how-to-use.md", - "Tutorials" => "tutorials.md", - "Concepts" => "concepts.md", - "Mathematical Formulation" => "formulation.md", - "API" => "api.md", - "Reference" => "reference.md", - ], + format = Documenter.HTML(; canonical = "https://TulipaEnergy.github.io/TulipaEnergyModel.jl"), + pages = ["index.md"; numbered_pages], ) -deploydocs(; repo = "github.com/TulipaEnergy/TulipaEnergyModel.jl", devbranch = "main") +deploydocs(; repo = "github.com/TulipaEnergy/TulipaEnergyModel.jl") diff --git a/docs/src/how-to-use.md b/docs/src/10-how-to-use.md similarity index 68% rename from docs/src/how-to-use.md rename to docs/src/10-how-to-use.md index 6975abab..36dbce84 100644 --- a/docs/src/how-to-use.md +++ b/docs/src/10-how-to-use.md @@ -15,20 +15,20 @@ Then consider installing a user-friendly code editor, such as [VSCode](https://c Choose one: -- In VSCode: Press CTRL+Shift+P and press Enter to start a Julia REPL. -- In the terminal: Type `julia` and press Enter +- In VSCode: Press CTRL+Shift+P and press Enter to start a Julia REPL. +- In the terminal: Type `julia` and press Enter ### Adding TulipaEnergyModel In Julia: -- Enter package mode (press "]") +- Enter package mode (press "]") ```julia-pkg pkg> add TulipaEnergyModel ``` -- Return to Julia mode (backspace) +- Return to Julia mode (backspace) ```julia julia> using TulipaEnergyModel @@ -38,7 +38,7 @@ julia> using TulipaEnergyModel It is nice to check that tests are passing to make sure your environment is working. (This takes a minute or two.) -- Enter package mode (press "]") +- Enter package mode (press "]") ```julia-pkg pkg> test TulipaEnergyModel @@ -50,8 +50,8 @@ All tests should pass. To run a scenario, use the function: -- [`run_scenario(connection)`](@ref) -- [`run_scenario(connection; output_folder)`](@ref) +- [`run_scenario(connection)`](@ref) +- [`run_scenario(connection; output_folder)`](@ref) The `connection` should have been created and the data loaded into it using [TulipaIO](https://github.com/TulipaEnergy/TulipaIO.jl). See the [tutorials](@ref tutorials) for a complete guide on how to achieve this. @@ -88,7 +88,7 @@ This file contains the list of assets and the static data associated with each o The meaning of `Missing` data depends on the parameter, for instance: -- `group`: No group assigned to the asset. +- `group`: No group assigned to the asset. #### [`graph-flows-data.csv`](@id graph-flows-data) @@ -100,15 +100,15 @@ This file contains the yearly data of each asset. The investment parameters are as follows: -- The `investable` parameter determines whether there is an investment decision for the asset or flow. -- The `investment_integer` parameter determines if the investment decision is integer or continuous. -- The `investment_cost` parameter represents the cost in the defined [timeframe](@ref timeframe). 
Thus, if the timeframe is a year, the investment cost is the annualized cost of the asset. -- The `investment_limit` parameter limits the total investment capacity of the asset or flow. This limit represents the potential of that particular asset or flow. Without data in this parameter, the model assumes no investment limit. +- The `investable` parameter determines whether there is an investment decision for the asset or flow. +- The `investment_integer` parameter determines if the investment decision is integer or continuous. +- The `investment_cost` parameter represents the cost in the defined [timeframe](@ref timeframe). Thus, if the timeframe is a year, the investment cost is the annualized cost of the asset. +- The `investment_limit` parameter limits the total investment capacity of the asset or flow. This limit represents the potential of that particular asset or flow. Without data in this parameter, the model assumes no investment limit. The meaning of `Missing` data depends on the parameter, for instance: -- `investment_limit`: There is no investment limit. -- `initial_storage_level`: The initial storage level is free (between the storage level limits), meaning that the optimization problem decides the best starting point for the storage asset. In addition, the first and last time blocks in a representative period are linked to create continuity in the storage level. +- `investment_limit`: There is no investment limit. +- `initial_storage_level`: The initial storage level is free (between the storage level limits), meaning that the optimization problem decides the best starting point for the storage asset. In addition, the first and last time blocks in a representative period are linked to create continuity in the storage level. #### [`flows-data.csv`](@id flows-data) @@ -116,7 +116,7 @@ The same as [`assets-data.csv`](@ref assets-data), but for flows. Each flow is d The meaning of `Missing` data depends on the parameter, for instance: -- `investment_limit`: There is no investment limit. +- `investment_limit`: There is no investment limit. #### [`assets-profiles.csv`] (@id assets-profiles-definition) @@ -163,9 +163,9 @@ If not specified, each asset will have the same time resolution as the represent There are currently three ways to specify the desired resolution, indicated in the column `specification`. The column `partition` serves to define the partitions in the specified style. -- `specification = uniform`: Set the resolution to a uniform amount, i.e., a time block is made of `X` timesteps. The number `X` is defined in the column `partition`. The number of timesteps in the representative period must be divisible by `X`. -- `specification = explicit`: Set the resolution according to a list of numbers separated by `;` on the `partition`. Each number in the list is the number of timesteps for that time block. For instance, `2;3;4` means that there are three time blocks, the first has 2 timesteps, the second has 3 timesteps, and the last has 4 timesteps. The sum of the list must be equal to the total number of timesteps in that representative period, as specified in `num_timesteps` of [`rep-periods-data.csv`](@ref rep-periods-data). -- `specification = math`: Similar to explicit, but using `+` and `x` for simplification. The value of `partition` is a sequence of elements of the form `NxT` separated by `+`, indicating `N` time blocks of length `T`. 
For instance, `2x3+3x6` is 2 time blocks of 3 timesteps, followed by 3 time blocks of 6 timesteps, for a total of 24 timesteps in the representative period. +- `specification = uniform`: Set the resolution to a uniform amount, i.e., a time block is made of `X` timesteps. The number `X` is defined in the column `partition`. The number of timesteps in the representative period must be divisible by `X`. +- `specification = explicit`: Set the resolution according to a list of numbers separated by `;` on the `partition`. Each number in the list is the number of timesteps for that time block. For instance, `2;3;4` means that there are three time blocks, the first has 2 timesteps, the second has 3 timesteps, and the last has 4 timesteps. The sum of the list must be equal to the total number of timesteps in that representative period, as specified in `num_timesteps` of [`rep-periods-data.csv`](@ref rep-periods-data). +- `specification = math`: Similar to explicit, but using `+` and `x` for simplification. The value of `partition` is a sequence of elements of the form `NxT` separated by `+`, indicating `N` time blocks of length `T`. For instance, `2x3+3x6` is 2 time blocks of 3 timesteps, followed by 3 time blocks of 6 timesteps, for a total of 24 timesteps in the representative period. The table below shows various results for different formats for a representative period with 12 timesteps. @@ -214,26 +214,26 @@ It hides the complexity behind the energy problem, making the usage more friendl #### Fields -- `graph`: The [Graph](@ref) object that defines the geometry of the energy problem. -- `representative_periods`: A vector of [Representative Periods](@ref representative-periods). -- `constraints_partitions`: Dictionaries that connect pairs of asset and representative periods to [time partitions](@ref Partition) (vectors of time blocks). -- `timeframe`: The number of periods in the `representative_periods`. -- `dataframes`: A Dictionary of dataframes used to linearize the variables and constraints. These are used internally in the model only. -- `groups`: A vector of [Groups](@ref group). -- `model`: A JuMP.Model object representing the optimization model. -- `solution`: A structure of the variable values (investments, flows, etc) in the solution. -- `solved`: A boolean indicating whether the `model` has been solved or not. -- `objective_value`: The objective value of the solved problem (Float64). -- `termination_status`: The termination status of the optimization model. -- `time_read_data`: Time taken (in seconds) for reading the data (Float64). -- `time_create_model`: Time taken (in seconds) for creating the model (Float64). -- `time_solve_model`: Time taken (in seconds) for solving the model (Float64). +- `graph`: The [Graph](@ref) object that defines the geometry of the energy problem. +- `representative_periods`: A vector of [Representative Periods](@ref representative-periods). +- `constraints_partitions`: Dictionaries that connect pairs of asset and representative periods to [time partitions](@ref Partition) (vectors of time blocks). +- `timeframe`: The number of periods in the `representative_periods`. +- `dataframes`: A Dictionary of dataframes used to linearize the variables and constraints. These are used internally in the model only. +- `groups`: A vector of [Groups](@ref group). +- `model`: A JuMP.Model object representing the optimization model. +- `solution`: A structure of the variable values (investments, flows, etc) in the solution. 
+- `solved`: A boolean indicating whether the `model` has been solved or not. +- `objective_value`: The objective value of the solved problem (Float64). +- `termination_status`: The termination status of the optimization model. +- `time_read_data`: Time taken (in seconds) for reading the data (Float64). +- `time_create_model`: Time taken (in seconds) for creating the model (Float64). +- `time_solve_model`: Time taken (in seconds) for solving the model (Float64). #### Constructor The `EnergyProblem` can also be constructed using the minimal constructor below. -- `EnergyProblem(connection)`: Constructs a new `EnergyProblem` object with the given `connection` that has been created and the data loaded into it using [TulipaIO](https://github.com/TulipaEnergy/TulipaIO.jl). The `graph`, `representative_periods`, and `timeframe` are computed using `create_internal_structures`. The `constraints_partitions` field is computed from the `representative_periods`, and the other fields are initialized with default values. +- `EnergyProblem(connection)`: Constructs a new `EnergyProblem` object with the given `connection` that has been created and the data loaded into it using [TulipaIO](https://github.com/TulipaEnergy/TulipaIO.jl). The `graph`, `representative_periods`, and `timeframe` are computed using `create_internal_structures`. The `constraints_partitions` field is computed from the `representative_periods`, and the other fields are initialized with default values. See the [basic example tutorial](@ref basic-example) to see how these can be used. @@ -271,16 +271,16 @@ Some variables and constraints are defined over every time block in a partition. For instance, for a representative period with 12 timesteps, all sets below are partitions: -- $\{\{1, 2, 3\}, \{4, 5, 6\}, \{7, 8, 9\}, \{10, 11, 12\}\}$ -- $\{\{1, 2, 3, 4\}, \{5, 6, 7, 8\}, \{9, 10, 11, 12\}\}$ -- $\{\{1\}, \{2, 3\}, \{4\}, \{5, 6, 7, 8\}, \{9, 10, 11, 12\}\}$ +- $\{\{1, 2, 3\}, \{4, 5, 6\}, \{7, 8, 9\}, \{10, 11, 12\}\}$ +- $\{\{1, 2, 3, 4\}, \{5, 6, 7, 8\}, \{9, 10, 11, 12\}\}$ +- $\{\{1\}, \{2, 3\}, \{4\}, \{5, 6, 7, 8\}, \{9, 10, 11, 12\}\}$ ### [Timeframe](@id timeframe) The timeframe is the total period we want to analyze with the model. Usually this is a year, but it can be any length of time. A timeframe has two fields: -- `num_periods`: The timeframe is defined by a certain number of periods. For instance, a year can be defined by 365 periods, each describing a day. -- `map_periods_to_rp`: Indicates the periods of the timeframe that map into a [representative period](@ref representative-periods) and the weight of the representative period to construct that period. +- `num_periods`: The timeframe is defined by a certain number of periods. For instance, a year can be defined by 365 periods, each describing a day. +- `map_periods_to_rp`: Indicates the periods of the timeframe that map into a [representative period](@ref representative-periods) and the weight of the representative period to construct that period. ### [Representative Periods](@id representative-periods) @@ -288,9 +288,9 @@ The [timeframe](@ref timeframe) (e.g., a full year) is described by a selection A representative period has three fields: -- `weight`: Indicates how many representative periods are contained in the [timeframe](@ref timeframe); this is inferred automatically from `map_periods_to_rp` in the [timeframe](@ref timeframe). -- `timesteps`: The number of timesteps blocks in the representative period. -- `resolution`: The duration in time of each timestep. 
+- `weight`: Indicates how many representative periods are contained in the [timeframe](@ref timeframe); this is inferred automatically from `map_periods_to_rp` in the [timeframe](@ref timeframe). +- `timesteps`: The number of timestep blocks in the representative period. +- `resolution`: The duration in time of each timestep.  The number of timesteps and resolution work together to define the coarseness of the period. Nothing is defined outside of these timesteps; for instance, if the representative period represents a day and you want to specify a variable or constraint with a coarseness of 30 minutes, you need to define the number of timesteps to 48 and the resolution to `0.5`.  @@ -299,13 +299,13 @@ The solution object `energy_problem.solution` is a mutable struct with the following fields:  -- `assets_investment[a]`: The investment for each asset, indexed on the investable asset `a`. -- `flows_investment[u, v]`: The investment for each flow, indexed on the investable flow `(u, v)`. -- `storage_level_intra_rp[a, rp, timesteps_block]`: The storage level for the storage asset `a` within (intra) a representative period `rp` and a time block `timesteps_block`. The list of time blocks is defined by `constraints_partitions`, which was used to create the model. -- `storage_level_inter_rp[a, periods_block]`: The storage level for the storage asset `a` between (inter) representative periods in the periods block `periods_block`. -- `flow[(u, v), rp, timesteps_block]`: The flow value for a given flow `(u, v)` at a given representative period `rp`, and time block `timesteps_block`. The list of time blocks is defined by `graph[(u, v)].partitions[rp]`. -- `objective_value`: A Float64 with the objective value at the solution. -- `duals`: A Dictionary containing the dual variables of selected constraints. +- `assets_investment[a]`: The investment for each asset, indexed on the investable asset `a`. +- `flows_investment[u, v]`: The investment for each flow, indexed on the investable flow `(u, v)`. +- `storage_level_intra_rp[a, rp, timesteps_block]`: The storage level for the storage asset `a` within (intra) a representative period `rp` and a time block `timesteps_block`. The list of time blocks is defined by `constraints_partitions`, which was used to create the model. +- `storage_level_inter_rp[a, periods_block]`: The storage level for the storage asset `a` between (inter) representative periods in the periods block `periods_block`. +- `flow[(u, v), rp, timesteps_block]`: The flow value for a given flow `(u, v)` at a given representative period `rp`, and time block `timesteps_block`. The list of time blocks is defined by `graph[(u, v)].partitions[rp]`. +- `objective_value`: A Float64 with the objective value at the solution. +- `duals`: A Dictionary containing the dual variables of selected constraints.  Check the [tutorial](@ref solution-tutorial) for tips on manipulating the solution.  @@ -319,10 +319,10 @@ Time blocks are used for the periods in the [timeframe](@ref timeframe) and the  This structure holds all the information of a given group with the following fields:  -- `name`: The name of the group. -- `invest_method`: Boolean value to indicate whether or not the group has an investment method. -- `min_investment_limit`: A minimum investment limit in MW is imposed on the total investments of the assets belonging to the group.
-- `max_investment_limit`: A maximum investment limit in MW is imposed on the total investments of the assets belonging to the group. +- `name`: The name of the group. +- `invest_method`: Boolean value to indicate whether or not the group has an investment method. +- `min_investment_limit`: A minimum investment limit in MW is imposed on the total investments of the assets belonging to the group. +- `max_investment_limit`: A maximum investment limit in MW is imposed on the total investments of the assets belonging to the group. ## [Exploring infeasibility](@id infeasible) @@ -346,8 +346,8 @@ end Section [Storage Modeling](@ref storage-modeling) explains the main concepts for modeling seasonal and non-seasonal storage in _TulipaEnergyModel.jl_. To define if an asset is one type or the other then consider the following: -- _Seasonal storage_: When the storage capacity of an asset is greater than the total length of representative periods, we recommend using the inter-temporal constraints. To apply these constraints, you must set the input parameter `is_seasonal` to `true` in the [`assets-data.csv`](@ref schemas). -- _Non-seasonal storage_: When the storage capacity of an asset is lower than the total length of representative periods, we recommend using the intra-temporal constraints. To apply these constraints, you must set the input parameter `is_seasonal` to `false` in the [`assets-data.csv`](@ref schemas). +- _Seasonal storage_: When the storage capacity of an asset is greater than the total length of representative periods, we recommend using the inter-temporal constraints. To apply these constraints, you must set the input parameter `is_seasonal` to `true` in the [`assets-data.csv`](@ref schemas). +- _Non-seasonal storage_: When the storage capacity of an asset is lower than the total length of representative periods, we recommend using the intra-temporal constraints. To apply these constraints, you must set the input parameter `is_seasonal` to `false` in the [`assets-data.csv`](@ref schemas). > **Note:** > If the input data covers only one representative period for the entire year, for example, with 8760-hour timesteps, and you have a monthly hydropower plant, then you should set the `is_seasonal` parameter for that asset to `false`. This is because the length of the representative period is greater than the storage capacity of the storage asset. @@ -356,14 +356,14 @@ Section [Storage Modeling](@ref storage-modeling) explains the main concepts for Energy storage assets have a unique characteristic wherein the investment is based not solely on the capacity to charge and discharge, but also on the energy capacity. Some storage asset types have a fixed duration for a given capacity, which means that there is a predefined ratio between energy and power. For instance, a battery of 10MW/unit and 4h duration implies that the energy capacity is 40MWh. Conversely, other storage asset types don't have a fixed ratio between the investment of capacity and storage capacity. Therefore, the energy capacity can be optimized independently of the capacity investment, such as hydrogen storage in salt caverns. To define if an energy asset is one type or the other then consider the following parameter setting in the file [`assets-data.csv`](@ref schemas): -- _Investment energy method_: To use this method, set the parameter `storage_method_energy` to `true`. In addition, it is necessary to define: +- _Investment energy method_: To use this method, set the parameter `storage_method_energy` to `true`. 
In addition, it is necessary to define:  -  - `investment_cost_storage_energy`: To establish the cost of investing in the storage capacity (e.g., kEUR/MWh/unit). -  - `fixed_cost_storage_energy`: To establish the fixed cost of energy storage capacity (e.g., kEUR/MWh/unit). -  - `investment_limit_storage_energy`: To define the potential of the energy capacity investment (e.g., MWh). `Missing` values mean that there is no limit. -  - `investment_integer_storage_energy`: To determine whether the investment variables of storage capacity are integers of continuous. +    - `investment_cost_storage_energy`: To establish the cost of investing in the storage capacity (e.g., kEUR/MWh/unit). +    - `fixed_cost_storage_energy`: To establish the fixed cost of energy storage capacity (e.g., kEUR/MWh/unit). +    - `investment_limit_storage_energy`: To define the potential of the energy capacity investment (e.g., MWh). `Missing` values mean that there is no limit. +    - `investment_integer_storage_energy`: To determine whether the investment variables of storage capacity are integers or continuous.  -- _Fixed energy-to-power ratio method_: To use this method, set the parameter `storage_method_energy` to `false`. In addition, it is necessary to define the parameter `energy_to_power_ratio` to establish the predefined duration of the storage asset or ratio between energy and power. Note that all the investment costs should be allocated in the parameter `investment_cost`. +- _Fixed energy-to-power ratio method_: To use this method, set the parameter `storage_method_energy` to `false`. In addition, it is necessary to define the parameter `energy_to_power_ratio` to establish the predefined duration of the storage asset or ratio between energy and power. Note that all the investment costs should be allocated in the parameter `investment_cost`.  In addition, the parameter `capacity_storage_energy` in the [`graph-assets-data.csv`](@ref schemas) defines the energy per unit of storage capacity invested in (e.g., MWh/unit).  @@ -373,9 +373,9 @@ For more details on the constraints that apply when selecting one method or the  Depending on the configuration of the energy storage assets, it may or may not be possible to charge and discharge them simultaneously. For instance, a single battery cannot charge and discharge at the same time, but some pumped hydro storage technologies have separate components for charging (pump) and discharging (turbine) that can function independently, allowing them to charge and discharge simultaneously. To account for these differences, the model provides users with three options for the `use_binary_storage_method` parameter in the [`assets-data.csv`](@ref schemas) file:  -- `binary`: the model adds a binary variable to prevent charging and discharging simultaneously. -- `relaxed_binary`: the model adds a binary variable that allows values between 0 and 1, reducing the likelihood of charging and discharging simultaneously. This option uses a tighter set of constraints close to the convex hull of the full formulation, resulting in fewer instances of simultaneous charging and discharging in the results. -- If no value is set, i.e., `missing` value, the storage asset can charge and discharge simultaneously. +- `binary`: the model adds a binary variable to prevent charging and discharging simultaneously. +- `relaxed_binary`: the model adds a binary variable that allows values between 0 and 1, reducing the likelihood of charging and discharging simultaneously.
This option uses a tighter set of constraints close to the convex hull of the full formulation, resulting in fewer instances of simultaneous charging and discharging in the results. +- If no value is set, i.e., `missing` value, the storage asset can charge and discharge simultaneously.  For more details on the constraints that apply when selecting this method, please visit the [`mathematical formulation`](@ref formulation) section.  @@ -383,10 +383,10 @@ For more details on the constraints that apply when selecting this method, pleas  The unit commitment constraints are only applied to producer and conversion assets. The `unit_commitment` parameter must be set to `true` to include the constraints in the [`assets-data.csv`](@ref schemas). Additionally, the following parameters should be set in that same file:  -- `unit_commitment_method`: It determines which unit commitment method to use. The current version of the code only includes the basic version. Future versions will add more detailed constraints as additional options. -- `units_on_cost`: Objective function coefficient on `units_on` variable. (e.g., no-load cost or idling cost in kEUR/h/unit) -- `unit_commitment_integer`: It determines whether the unit commitment variables are considered as integer or not (`true` or `false`) -- `min_operating_point`: Minimum operating point or minimum stable generation level defined as a portion of the capacity of asset (p.u.) +- `unit_commitment_method`: It determines which unit commitment method to use. The current version of the code only includes the basic version. Future versions will add more detailed constraints as additional options. +- `units_on_cost`: Objective function coefficient on `units_on` variable. (e.g., no-load cost or idling cost in kEUR/h/unit) +- `unit_commitment_integer`: It determines whether the unit commitment variables are considered as integer or not (`true` or `false`) +- `min_operating_point`: Minimum operating point or minimum stable generation level defined as a portion of the capacity of asset (p.u.)  For more details on the constraints that apply when selecting this method, please visit the [`mathematical formulation`](@ref formulation) section.  @@ -394,8 +394,8 @@ For more details on the constraints that apply when selecting this method, pleas  The ramping constraints are only applied to producer and conversion assets. The `ramping` parameter must be set to `true` to include the constraints in the [`assets-data.csv`](@ref schemas). Additionally, the following parameters should be set in that same file:  -- `max_ramp_up`: Maximum ramping up rate as a portion of the capacity of asset (p.u./h) -- `max_ramp_down:`Maximum ramping down rate as a portion of the capacity of asset (p.u./h) +- `max_ramp_up`: Maximum ramping up rate as a portion of the capacity of asset (p.u./h) +- `max_ramp_down`: Maximum ramping down rate as a portion of the capacity of asset (p.u./h)  For more details on the constraints that apply when selecting this method, please visit the [`mathematical formulation`](@ref formulation) section.  @@ -403,17 +403,17 @@ For more details on the constraints that apply when selecting this method, pleas  For the model to add constraints for a [maximum or minimum energy limit](@ref inter-temporal-energy-constraints) for an asset throughout the model's timeframe (e.g., a year), we need to establish a couple of parameters:  -- `is_seasonal = true` in the [`assets-data.csv`](@ref schemas). This parameter enables the model to use the inter-temporal constraints.
-- `max_energy_timeframe_partition` $\neq$ `missing` or `min_energy_timeframe_partition` $\neq$ `missing` in the [`assets-data.csv`](@ref schemas). This value represents the peak energy that will be then multiplied by the profile for each period in the timeframe. +- `is_seasonal = true` in the [`assets-data.csv`](@ref schemas). This parameter enables the model to use the inter-temporal constraints. +- `max_energy_timeframe_partition` $\neq$ `missing` or `min_energy_timeframe_partition` $\neq$ `missing` in the [`assets-data.csv`](@ref schemas). This value represents the peak energy that will then be multiplied by the profile for each period in the timeframe.  > **Note:** > These parameters are defined per period, and the default values for profiles are 1.0 p.u. per period. If the periods are determined daily, the energy limit for the whole year will be 365 times `max` or `min_energy_timeframe_partition`.  -- (optional) `profile_type` and `profile_name` in the [`assets-timeframe-profiles.csv`](@ref schemas) and the profile values in the [`profiles-timeframe.csv`](@ref schemas). If there is no profile defined, then by default it is 1.0 p.u. for all periods in the timeframe. -- (optional) define a period partition in [`assets-timeframe-partitions.csv`](@ref schemas). If there is no partition defined, then by default the constraint is created for each period in the timeframe, otherwise, it will consider the partition definition in the file. +- (optional) `profile_type` and `profile_name` in the [`assets-timeframe-profiles.csv`](@ref schemas) and the profile values in the [`profiles-timeframe.csv`](@ref schemas). If there is no profile defined, then by default it is 1.0 p.u. for all periods in the timeframe. +- (optional) define a period partition in [`assets-timeframe-partitions.csv`](@ref schemas). If there is no partition defined, then by default the constraint is created for each period in the timeframe, otherwise, it will consider the partition definition in the file.  > **Tip:** > If you want to set a limit on the maximum or minimum outgoing energy for a year with representative days, you can use the partition definition to create a single partition for the entire year to combine the profile.  -#### Example +### Example: Setting Energy Limits  Let's assume we have a year divided into 365 days because we are using days as periods in the representatives from [_TulipaClustering.jl_](https://github.com/TulipaEnergy/TulipaClustering.jl). Also, we define the `max_energy_timeframe_partition = 10 MWh`, meaning the peak energy we want to have is 10MWh for each period or period partition. So depending on the optional information, we can have:  @@ -441,15 +441,15 @@ Groups are useful to represent several common constraints, the following group c  The mathematical formulation of the maximum and minimum investment limit for group constraints is available [here](@ref investment-group-constraints). The parameters to set up these constraints in the model are in the [`groups-data.csv`](@ref schemas) file.  -- `invest_method = true`. This parameter enables the model to use the investment group constraints. -- `min_investment_limit` $\neq$ `missing` or `max_investment_limit` $\neq$ `missing`.
This value represents the limits that will be imposed on the investment that belongs to the group. > **Notes:** > > 1. A missing value in the parameters `min_investment_limit` and `max_investment_limit` means that there is no investment limit. > 2. These constraints are applied to the investments each year. The model does not yet have investment limits to a group's accumulated invested capacity. -#### Example +### Example: Group of Assets Let's explore how the groups are set up in the test case called [Norse](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/tree/main/test/inputs/Norse). First, let's take a look at the groups-data.csv file: diff --git a/docs/src/tutorials.md b/docs/src/20-tutorials.md similarity index 100% rename from docs/src/tutorials.md rename to docs/src/20-tutorials.md diff --git a/docs/src/concepts.md b/docs/src/30-concepts.md similarity index 78% rename from docs/src/concepts.md rename to docs/src/30-concepts.md index 2de308a4..12e44a71 100644 --- a/docs/src/concepts.md +++ b/docs/src/30-concepts.md @@ -9,18 +9,18 @@ Depth = 3 _TulipaEnergyModel.jl_ incorporates two fundamental concepts that serve as the foundation of the optimization model: -- **Energy Assets**: representation of a physical asset that can produce, consume, store, balance, or convert energy. Some examples of what these assets can represent are: - - Producer: e.g., wind turbine, solar panel - - Consumer: e.g., electricity demand, heat demand - - Storage: e.g., battery, pumped-hydro storage - - Balancing Hub: e.g., an electricity network that serves as a connection among other energy assets - - Conversion: e.g., power plants, electrolyzers -- **Flows**: representation of the connections among assets, e.g., pipelines, transmission lines, or simply the energy production that goes from one asset to another. +- **Energy Assets**: representation of a physical asset that can produce, consume, store, balance, or convert energy. Some examples of what these assets can represent are: + - Producer: e.g., wind turbine, solar panel + - Consumer: e.g., electricity demand, heat demand + - Storage: e.g., battery, pumped-hydro storage + - Balancing Hub: e.g., an electricity network that serves as a connection among other energy assets + - Conversion: e.g., power plants, electrolyzers +- **Flows**: representation of the connections among assets, e.g., pipelines, transmission lines, or simply the energy production that goes from one asset to another. In a nutshell, the model guarantees a balance of energy for the various types of assets while considering the flow limits. It considers a set of [representative periods](@ref representative-periods) (e.g., days or weeks) for a given [timeframe](@ref timeframe) (e.g., a year) the user wants to analyze. Therefore, the model has two types of temporal (time) constraints to consider the different chronology characteristics of the assets: -- **Intra-temporal Constraints**: These constraints limit the asset or flow within a representative period. The intra-temporal constraints help to characterize the short-term operational dynamics of the assets. So far, the model considers balance and flow limitations within the representative period, but future developments will include unit commitment, ramping, and reserve constraints. -- **Inter-temporal Constraints**: These constraints combine the information of the representative periods and create limitations between them to recover chronological information across the whole timeframe. 
The inter-temporal constraints help to characterize the long-term operational dynamics of the assets (e.g., seasonality). So far, the model uses this type of constraint to model seasonal storage. Still, future developments will include, for example, maximum or minimum production/consumption for a year (or any timeframe). +- **Intra-temporal Constraints**: These constraints limit the asset or flow within a representative period. The intra-temporal constraints help to characterize the short-term operational dynamics of the assets. So far, the model considers balance and flow limitations within the representative period, but future developments will include unit commitment, ramping, and reserve constraints. +- **Inter-temporal Constraints**: These constraints combine the information of the representative periods and create limitations between them to recover chronological information across the whole timeframe. The inter-temporal constraints help to characterize the long-term operational dynamics of the assets (e.g., seasonality). So far, the model uses this type of constraint to model seasonal storage. Still, future developments will include, for example, maximum or minimum production/consumption for a year (or any timeframe). The [`mathematical formulation`](@ref formulation) shows an overview of these constraints and the variables in the model. @@ -56,14 +56,14 @@ One of the core features of _TulipaEnergyModel.jl_ is that it can handle differe Therefore, for this simple example, we can determine the number of constraints and variables in the optimization problem: -- _Number of variables_: 42 since we have six connections among assets (i.e., 6 flows x 6 hours = 36 variables) and one storage asset (i.e., 1 storage level x 6 h = 6 variables) -- _Number of constraints_: 72, which are: +- _Number of variables_: 42 since we have six connections among assets (i.e., 6 flows x 6 hours = 36 variables) and one storage asset (i.e., 1 storage level x 6 h = 6 variables) +- _Number of constraints_: 72, which are: - - 24 from the maximum output limit of the assets that produce, convert, or discharge energy (i.e., `H2`, `wind`, `ccgt`, and `phs`) for each hour (i.e., 4 assets x 6 h = 24 constraints) - - 6 from the maximum input limit of the storage or charging limit for the `phs` - - 6 from the maximum storage level limit for the `phs` - - 12 from the import and export limits for the transmission line between the `balance` hub and the `demand` - - 24 from the energy balance on the consumer, hub, conversion, and storage assets (i.e., `demand`, `balance`, `ccgt`, and `phs`) for each hour (i.e., 4 assets x 6 h = 24 constraints) + - 24 from the maximum output limit of the assets that produce, convert, or discharge energy (i.e., `H2`, `wind`, `ccgt`, and `phs`) for each hour (i.e., 4 assets x 6 h = 24 constraints) + - 6 from the maximum input limit of the storage or charging limit for the `phs` + - 6 from the maximum storage level limit for the `phs` + - 12 from the import and export limits for the transmission line between the `balance` hub and the `demand` + - 24 from the energy balance on the consumer, hub, conversion, and storage assets (i.e., `demand`, `balance`, `ccgt`, and `phs`) for each hour (i.e., 4 assets x 6 h = 24 constraints) Depending on the input data and the level of detail you want to model, hourly resolution in all the variables might not be necessary. 
_TulipaEnergyModel.jl_ can have different time resolutions for each asset and flow to simplify the optimization problem and approximate hourly representation. This feature is useful for large-scale energy systems that involve multiple sectors, as detailed granularity is not always necessary due to the unique temporal dynamics of each sector. For instance, we can use hourly resolution for the electricity sector and six-hour resolution for the hydrogen sector. We can couple multiple sectors, each with its own temporal resolution. @@ -96,19 +96,19 @@ The following figure illustrates these definitions on the example system. So, let's recap: -- The hydrogen producer (`H2`) is in a 6-hour resolution represented by the range `1:6`, meaning that the balance of the hydrogen produced is for every 6 hours. -- The flow from the hydrogen producer to the ccgt power plant (`H2,ccgt`) is also in a 6-hour resolution `1:6`. -- The flow from the ccgt power plant to the balance hub (`ccgt, balance`) has hourly resolution `[1,2,3,4,5,6]`. -- The `ccgt` is a conversion plant that takes hydrogen to produce electricity. Since both sectors have different time resolutions, the energy balance in the conversion asset is defined in the lowest resolution connecting to the asset. In this case, the energy balance in the `ccgt` is defined every 6 hours, i.e., in the range `1:6`. -- The `wind` producer has an hourly profile of electricity production, so the resolution of the asset is hourly. -- The `wind` producer output has two connections, one to the `balance` hub and the other to the pumped-hydro storage (`phs`) with different resolutions: - - The flow from the wind producer to the phs storage (`wind, phs`) has a uniform resolution of two blocks from hours 1 to 3 (i.e., `1:3`) and from hours 4 to 6 (i.e., `4:6`). - - The flow from the wind producer to the balance hub (`wind, balance`) has a variable resolution of two blocks, too, but from hours 1 to 2 (i.e., `1:2`) and from hours 3 to 6 (i.e., `3:6`). -- The `phs` is in a 6-hour resolution represented by the range `1:6`, meaning the storage balance is determined every 6 hours. -- The flow from the phs to the balance (`phs, balance`) represents the discharge of the `phs`. This flow has a variable resolution of two blocks from hours 1 to 4 (i.e., `1:4`) and from hours 5 to 6 (i.e., `5:6`), which differs from the one defined for the charging flow from the `wind` asset. -- The `demand` consumption has hourly input data with one connection to the `balance` hub: - - The flow from the balance hub to the demand (`balance, demand`) has a uniform resolution of 3 hours; therefore, it has two blocks, one from hours 1 to 3 (i.e., `1:3`) and the other from hours 4 to 6 (i.e., `4:6`). -- The `balance` hub integrates all the different assets with their different resolutions. The lowest resolution of all connections determines the balance equation for this asset. Therefore, the resulting resolution is into two blocks, one from hours 1 to 4 (i.e., `1:4`) and the other from hours 5 to 6 (i.e., `5:6`). +- The hydrogen producer (`H2`) is in a 6-hour resolution represented by the range `1:6`, meaning that the balance of the hydrogen produced is for every 6 hours. +- The flow from the hydrogen producer to the ccgt power plant (`H2,ccgt`) is also in a 6-hour resolution `1:6`. +- The flow from the ccgt power plant to the balance hub (`ccgt, balance`) has hourly resolution `[1,2,3,4,5,6]`. +- The `ccgt` is a conversion plant that takes hydrogen to produce electricity. 
Since both sectors have different time resolutions, the energy balance in the conversion asset is defined in the lowest resolution connecting to the asset. In this case, the energy balance in the `ccgt` is defined every 6 hours, i.e., in the range `1:6`. +- The `wind` producer has an hourly profile of electricity production, so the resolution of the asset is hourly. +- The `wind` producer output has two connections, one to the `balance` hub and the other to the pumped-hydro storage (`phs`) with different resolutions: + - The flow from the wind producer to the phs storage (`wind, phs`) has a uniform resolution of two blocks from hours 1 to 3 (i.e., `1:3`) and from hours 4 to 6 (i.e., `4:6`). + - The flow from the wind producer to the balance hub (`wind, balance`) has a variable resolution of two blocks, too, but from hours 1 to 2 (i.e., `1:2`) and from hours 3 to 6 (i.e., `3:6`). +- The `phs` is in a 6-hour resolution represented by the range `1:6`, meaning the storage balance is determined every 6 hours. +- The flow from the phs to the balance (`phs, balance`) represents the discharge of the `phs`. This flow has a variable resolution of two blocks from hours 1 to 4 (i.e., `1:4`) and from hours 5 to 6 (i.e., `5:6`), which differs from the one defined for the charging flow from the `wind` asset. +- The `demand` consumption has hourly input data with one connection to the `balance` hub: + - The flow from the balance hub to the demand (`balance, demand`) has a uniform resolution of 3 hours; therefore, it has two blocks, one from hours 1 to 3 (i.e., `1:3`) and the other from hours 4 to 6 (i.e., `4:6`). +- The `balance` hub integrates all the different assets with their different resolutions. The lowest resolution of all connections determines the balance equation for this asset. Therefore, the resulting resolution is into two blocks, one from hours 1 to 4 (i.e., `1:4`) and the other from hours 5 to 6 (i.e., `5:6`). > **Note:** > This example demonstrates that different time resolutions can be assigned to each asset and flow in the model. Additionally, the resolutions do not need to be uniform and can vary throughout the horizon. @@ -117,11 +117,11 @@ The complete input data for this example can be found [here](https://github.com/ Due to the flexible resolution, we must explicitly state how the constraints are constructed. For each constraint, three things need to be considered: -- Whether it is type _power_ or type _energy_. - - type _power_: highest resolution - - type _energy_: lowest resolution (multiplied by durations) -- How the resolution is determined (regardless of whether it is highest or lowest): the incoming flows, the outgoing flows, or a combination of both. -- How the related parameters are treated. We use two methods of aggregation, _sum_ or _mean_. +- Whether it is type _power_ or type _energy_. + - type _power_: highest resolution + - type _energy_: lowest resolution (multiplied by durations) +- How the resolution is determined (regardless of whether it is highest or lowest): the incoming flows, the outgoing flows, or a combination of both. +- How the related parameters are treated. We use two methods of aggregation, _sum_ or _mean_. Below is the table outlining the details for each type of constraint. Note _min_ means highest resolution, and _max_ means lowest resolution. 
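To make the idea of a lowest-resolution (_max_) combination concrete, here is a minimal sketch in Julia. It only illustrates the reasoning used in the recap above, with time blocks written as `UnitRange`s of timesteps and every partition assumed to cover the whole representative period; the helper name `lowest_resolution` is invented for this example and is not part of TulipaEnergyModel.jl.

```julia
# Illustrative sketch only (not the package's internal implementation):
# combine several time partitions of the same representative period into the
# lowest (coarsest) resolution, as used for hub and conversion balances.
function lowest_resolution(partitions::Vector{UnitRange{Int}}...)
    num_timesteps = maximum(last(p[end]) for p in partitions)
    blocks = UnitRange{Int}[]
    first_timestep = 1
    while first_timestep <= num_timesteps
        # The block ends at the latest end among the blocks containing first_timestep
        last_timestep = maximum(
            last(block) for p in partitions for block in p if first_timestep in block
        )
        push!(blocks, first_timestep:last_timestep)
        first_timestep = last_timestep + 1
    end
    return blocks
end

# The balance hub in the example connects flows with partitions
# [1:1, ..., 6:6], [1:2, 3:6], [1:4, 5:6], and [1:3, 4:6];
# their lowest resolution is [1:4, 5:6], as stated in the recap above.
lowest_resolution([1:1, 2:2, 3:3, 4:4, 5:5, 6:6], [1:2, 3:6], [1:4, 5:6], [1:3, 4:6])

# The ccgt asset connects flows with partitions [1:6] and hourly blocks;
# their lowest resolution is the single block 1:6, also as stated above.
lowest_resolution([1:6], [1:1, 2:2, 3:3, 4:4, 5:5, 6:6])
```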
@@ -329,14 +329,14 @@ The table below shows the constraints and variables for each approach over a 6-h By comparing the classic approach with the other methods, we can analyze their differences: -- The flexible connection with hourly resolution reduces 6 variables ($12.5\%$) and 12 constraints ($\approx 14\%$). Notice that we include the 6 extra constraints related to not allowing charging from the grid, although these constraints can also be modeled as bounds. Finally, the objective function value is the same, since we use an hourly time resolution in both cases. -- The combination of features reduces 32 variables ($\approx 67\%$) and 55 constraints ($\approx 65\%$) with an approximation error of $\approx 0.073\%$. +- The flexible connection with hourly resolution reduces 6 variables ($12.5\%$) and 12 constraints ($\approx 14\%$). Notice that we include the 6 extra constraints related to not allowing charging from the grid, although these constraints can also be modeled as bounds. Finally, the objective function value is the same, since we use an hourly time resolution in both cases. +- The combination of features reduces 32 variables ($\approx 67\%$) and 55 constraints ($\approx 65\%$) with an approximation error of $\approx 0.073\%$. The level of reduction and approximation error will depend on the case study. Some cases that would benefit from this feature include: -- Coupling different energy sectors with various dynamics. For instance, methane, hydrogen, and heat sectors can be represented in energy models with lower resolutions (e.g., 4, 6, or 12h) than the electricity sector, usually modeled in higher resolutions (e.g., 1h, 30 min). +- Coupling different energy sectors with various dynamics. For instance, methane, hydrogen, and heat sectors can be represented in energy models with lower resolutions (e.g., 4, 6, or 12h) than the electricity sector, usually modeled in higher resolutions (e.g., 1h, 30 min). -- Having high resolutions for all assets in a large-scale case study may not be necessary. For example, if analyzing a European case study focusing on a specific country like The Netherlands, hourly details for distant countries (such as Portugal and Spain) may not be required. However, one would still want to consider their effect on The Netherlands without causing too much computational burden. In such cases, flexible time resolution can maintain hourly details in the focus country, while reducing the detail in distant countries by increasing their resolution (to two hours or more). This reduction allows a broader scope without over-burdening computation. +- Having high resolutions for all assets in a large-scale case study may not be necessary. For example, if analyzing a European case study focusing on a specific country like The Netherlands, hourly details for distant countries (such as Portugal and Spain) may not be required. However, one would still want to consider their effect on The Netherlands without causing too much computational burden. In such cases, flexible time resolution can maintain hourly details in the focus country, while reducing the detail in distant countries by increasing their resolution (to two hours or more). This reduction allows a broader scope without over-burdening computation. 
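As a companion to the sketch above, constraints of type _power_ use the highest (finest) resolution, which amounts to splitting time at every block boundary, i.e., at the intersections of the blocks. The sketch below is again only an illustration under the same assumptions; the name `highest_resolution` is invented here and is not a package function. It reproduces the intersections `1:2`, `3:3`, `4:4`, `5:6` that the unit commitment example in the next section derives for flow blocks `1:2, 3:4, 5:6` and `units_on` blocks `1:3, 4:6`.

```julia
# Illustrative sketch only (not a TulipaEnergyModel.jl function): the highest
# (finest) resolution combination is given by the union of all block ends.
function highest_resolution(partitions::Vector{UnitRange{Int}}...)
    boundaries = sort(unique(last(block) for p in partitions for block in p))
    blocks = UnitRange{Int}[]
    first_timestep = 1
    for last_timestep in boundaries
        push!(blocks, first_timestep:last_timestep)
        first_timestep = last_timestep + 1
    end
    return blocks
end

# Flow blocks 1:2, 3:4, 5:6 combined with units_on blocks 1:3, 4:6 give the
# intersections 1:2, 3:3, 4:4, 5:6 used in the unit commitment example below.
highest_resolution([1:2, 3:4, 5:6], [1:3, 4:6])
```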
## [Flexible Time Resolution in the Unit Commitment and Ramping Constraints](@id flex-time-res-uc)  @@ -378,10 +378,10 @@ Remember that the section [`mathematical formulation`](@ref formulation) shows t  With this information, we can analyze the constraints in each of the following cases:  -- Ramping in assets with multiple outputs -- Unit commitment in assets with constant time resolution -- Unit commitment and ramping in assets with flexible time resolution that are multiple of each other -- Unit commitment and ramping in assets with flexible time resolution that are not multiple of each other +- Ramping in assets with multiple outputs +- Unit commitment in assets with constant time resolution +- Unit commitment and ramping in assets with flexible time resolution that are multiple of each other +- Unit commitment and ramping in assets with flexible time resolution that are not multiple of each other  We will analyze each case in the following sections, considering the constraints resolution defined in the summary table in the [flexible time resolution](@ref flex-time-res) section. For the sake of simplicity, we only show the asset $a$ and timestep block $b_k$ index and the constraints as they appear in the .lp file of the example, i.e., with all the coefficients and RHS values calculated from the input parameters. The .lp file can be exported using the keyword argument `write_lp_file = true` in the [`run_scenario`](@ref) function.  @@ -395,10 +395,10 @@ Let's now take a look at the resulting constraints in the model.  `max_ramp_up(gas)`: The first constraint starts in the second timestep block and takes the difference between the output flows above the minimum operating point from $b_k =$ `2:2` and $b_k =$ `1:1`. Note that since the `flow(gas,ccgt)` is the same in both timestep blocks, the only variables that appear in this first constraint are the ones associated with the `flow(gas,ocgt)`. The second constraint takes the difference between the output flows from $b_k =$ `3:3` and $b_k =$ `2:2`; in this case, there is a change in the `flow(gas, ocgt)`; therefore, the constraint considers both changes in the output flows of the asset. In addition, the ramping parameter is multiplied by the flow duration with the highest resolution, i.e., one hour, which is the duration of the `flow(gas,ocgt)`.  -- $b_k =$ `2:2`: -1 `flow(gas,ocgt,1:1)` + 1 `flow(gas,ocgt,2:2)` <= 1494 -- $b_k =$ `3:3`: -1 `flow(gas,ocgt,2:2)` + 1 `flow(gas,ocgt,3:3)` - 1 `flow(gas,ccgt,1:2)` + 1 `flow(gas,ccgt,3:4)` <= 1494 -- $b_k =$ `4:4`: -1 `flow(gas,ocgt,3:3)` + 1 `flow(gas,ocgt,4:4)` <= 1494 -- $b_k =$ `5:5`: -1 `flow(gas,ocgt,4:4)` + 1 `flow(gas,ocgt,5:5)` - 1 `flow(gas,ccgt,3:4)` + 1 `flow(gas,ccgt,5:6)` <= 1494 +- $b_k =$ `2:2`: -1 `flow(gas,ocgt,1:1)` + 1 `flow(gas,ocgt,2:2)` <= 1494 +- $b_k =$ `3:3`: -1 `flow(gas,ocgt,2:2)` + 1 `flow(gas,ocgt,3:3)` - 1 `flow(gas,ccgt,1:2)` + 1 `flow(gas,ccgt,3:4)` <= 1494 +- $b_k =$ `4:4`: -1 `flow(gas,ocgt,3:3)` + 1 `flow(gas,ocgt,4:4)` <= 1494 +- $b_k =$ `5:5`: -1 `flow(gas,ocgt,4:4)` + 1 `flow(gas,ocgt,5:5)` - 1 `flow(gas,ccgt,3:4)` + 1 `flow(gas,ccgt,5:6)` <= 1494  For the maximum ramp down we have similar constraints as the ones shown above.  @@ -412,21 +412,21 @@ Let's now take a look at the resulting constraints in the model.
Because everyth `limit_units_on(ocgt)`: The upper bound of the `units_on` is the investment variable of the `asset`.  -- $b_k =$ `1:1`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,1:1)` <= 0 -- $b_k =$ `2:2`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,2:2)` <= 0 -- $b_k =$ `3:3`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,3:3)` <= 0 +- $b_k =$ `1:1`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,1:1)` <= 0 +- $b_k =$ `2:2`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,2:2)` <= 0 +- $b_k =$ `3:3`: -1 `assets_investment(ocgt)` + 1 `units_on(ocgt,3:3)` <= 0  `min_output_flow(ocgt)`: The minimum operating point is 10 MW, so the asset must produce an output flow greater than this value when the unit is online.  -- $b_k =$ `1:1`: 1 `flow(ocgt,demand,1:1)` - 10 `units_on(ocgt,1:1)` >= 0 -- $b_k =$ `2:2`: 1 `flow(ocgt,demand,2:2)` - 10 `units_on(ocgt,2:2)` >= 0 -- $b_k =$ `3:3`: 1 `flow(ocgt,demand,3:3)` - 10 `units_on(ocgt,3:3)` >= 0 +- $b_k =$ `1:1`: 1 `flow(ocgt,demand,1:1)` - 10 `units_on(ocgt,1:1)` >= 0 +- $b_k =$ `2:2`: 1 `flow(ocgt,demand,2:2)` - 10 `units_on(ocgt,2:2)` >= 0 +- $b_k =$ `3:3`: 1 `flow(ocgt,demand,3:3)` - 10 `units_on(ocgt,3:3)` >= 0  `max_output_flow(ocgt)`: The capacity is 100 MW, so the asset must produce an output flow lower than this value when the unit is online.  -- $b_k =$ `1:1`: 1 `flow(ocgt,demand,1:1)` - 100 `units_on(ocgt,1:1)` <= 0 -- $b_k =$ `2:2`: 1 `flow(ocgt,demand,2:2)` - 100 `units_on(ocgt,2:2)` <= 0 -- $b_k =$ `3:3`: 1 `flow(ocgt,demand,3:3)` - 100 `units_on(ocgt,3:3)` <= 0 +- $b_k =$ `1:1`: 1 `flow(ocgt,demand,1:1)` - 100 `units_on(ocgt,1:1)` <= 0 +- $b_k =$ `2:2`: 1 `flow(ocgt,demand,2:2)` - 100 `units_on(ocgt,2:2)` <= 0 +- $b_k =$ `3:3`: 1 `flow(ocgt,demand,3:3)` - 100 `units_on(ocgt,3:3)` <= 0  For the maximum ramp down we have similar constraints as the ones shown above.  @@ -440,43 +440,43 @@ Let's now take a look at the resulting constraints in the model.  `limit_units_on(smr)`: The `units_on` variables are defined every 6h; therefore, the upper bound of the variable is also every 6h. In addition, the `smr` is not investable and has one existing unit that limits the commitment variables.  -- $b_k =$ `1:6`: 1 `units_on(smr,1:6)` <= 1 -- $b_k =$ `7:12`: 1 `units_on(smr,7:12)` <= 1 -- $b_k =$ `13:18`: 1 `units_on(smr,13:18)` <= 1 -- $b_k =$ `19:24`: 1 `units_on(smr,19:24)` <= 1 +- $b_k =$ `1:6`: 1 `units_on(smr,1:6)` <= 1 +- $b_k =$ `7:12`: 1 `units_on(smr,7:12)` <= 1 +- $b_k =$ `13:18`: 1 `units_on(smr,13:18)` <= 1 +- $b_k =$ `19:24`: 1 `units_on(smr,19:24)` <= 1  `min_output_flow(smr)`: The minimum operating point is 150 MW, so the asset must produce an output flow greater than this value when the unit is online. Since the `units_on` variables are defined every 6h, the first six constraints show that the minimum operating point is multiplied by the variable in block `1:6`. The next six constraints are multiplied by the `units_on` in block `7:12`, and so on.
-- $b_k =$ `1:1`: 1 `flow(smr,demand,1:1)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `2:2`: 1 `flow(smr,demand,2:2)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `3:3`: 1 `flow(smr,demand,3:3)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `4:4`: 1 `flow(smr,demand,4:4)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `5:5`: 1 `flow(smr,demand,5:5)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `6:6`: 1 `flow(smr,demand,6:6)` - 150 `units_on(smr,1:6)` >= 0 -- $b_k =$ `7:7`: 1 `flow(smr,demand,7:7)` - 150 `units_on(smr,7:12)` >= 0 -- $b_k =$ `8:8`: 1 `flow(smr,demand,8:8)` - 150 `units_on(smr,7:12)` >= 0 +- $b_k =$ `1:1`: 1 `flow(smr,demand,1:1)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `2:2`: 1 `flow(smr,demand,2:2)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `3:3`: 1 `flow(smr,demand,3:3)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `4:4`: 1 `flow(smr,demand,4:4)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `5:5`: 1 `flow(smr,demand,5:5)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `6:6`: 1 `flow(smr,demand,6:6)` - 150 `units_on(smr,1:6)` >= 0 +- $b_k =$ `7:7`: 1 `flow(smr,demand,7:7)` - 150 `units_on(smr,7:12)` >= 0 +- $b_k =$ `8:8`: 1 `flow(smr,demand,8:8)` - 150 `units_on(smr,7:12)` >= 0  `max_output_flow(smr)`: The capacity is 200 MW, so the asset must produce an output flow lower than this value when the unit is online. Similar to the minimum operating point constraint, here the `units_on` for the timestep block `1:6` are used in the first six constraints, and the `units_on` for the timestep block `7:12` are used in the next six constraints, and so on.  -- $b_k =$ `1:1`: 1 `flow(smr,demand,1:1)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `2:2`: 1 `flow(smr,demand,2:2)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `3:3`: 1 `flow(smr,demand,3:3)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `4:4`: 1 `flow(smr,demand,4:4)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `5:5`: 1 `flow(smr,demand,5:5)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `6:6`: 1 `flow(smr,demand,6:6)` - 200 `units_on(smr,1:6)` <= 0 -- $b_k =$ `7:7`: 1 `flow(smr,demand,7:7)` - 200 `units_on(smr,7:12)` <= 0 -- $b_k =$ `8:8`: 1 `flow(smr,demand,8:8)` - 200 `units_on(smr,7:12)` <= 0 +- $b_k =$ `1:1`: 1 `flow(smr,demand,1:1)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `2:2`: 1 `flow(smr,demand,2:2)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `3:3`: 1 `flow(smr,demand,3:3)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `4:4`: 1 `flow(smr,demand,4:4)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `5:5`: 1 `flow(smr,demand,5:5)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `6:6`: 1 `flow(smr,demand,6:6)` - 200 `units_on(smr,1:6)` <= 0 +- $b_k =$ `7:7`: 1 `flow(smr,demand,7:7)` - 200 `units_on(smr,7:12)` <= 0 +- $b_k =$ `8:8`: 1 `flow(smr,demand,8:8)` - 200 `units_on(smr,7:12)` <= 0  `max_ramp_up(smr)`: The ramping capacity is 20MW, so the change in the output flow above the minimum operating point needs to be below that value when the asset is online. For constraints from `2:2` to `6:6`, the `units_on` variable is the same, i.e., `units_on` at timestep block `1:6`. The ramping constraint at timestep block `7:7` shows the `units_on` from the timestep block `1:6` and `7:12` since the change in the flow includes both variables. Note that if the `units_on` variable is zero in the timestep block `1:6`, then the ramping constraint at timestep block `7:7` allows the asset to go from zero flow to the minimum operating point plus the ramping capacity (i.e., 150 + 20 = 170).
-- $b_k =$ `2:2`: -1 `flow(smr,demand,1:1)` + 1 `flow(smr,demand,2:2)` - 20 `units_on(smr,1:6)` <= 0 -- $b_k =$ `3:3`: -1 `flow(smr,demand,2:2)` + 1 `flow(smr,demand,3:3)` - 20 `units_on(smr,1:6)` <= 0 -- $b_k =$ `4:4`: -1 `flow(smr,demand,3:3)` + 1 `flow(smr,demand,4:4)` - 20 `units_on(smr,1:6)` <= 0 -- $b_k =$ `5:5`: -1 `flow(smr,demand,4:4)` + 1 `flow(smr,demand,5:5)` - 20 `units_on(smr,1:6)` <= 0 -- $b_k =$ `6:6`: -1 `flow(smr,demand,5:5)` + 1 `flow(smr,demand,6:6)` - 20 `units_on(smr,1:6)` <= 0 -- $b_k =$ `7:7`: -1 `flow(smr,demand,6:6)` + 1 `flow(smr,demand,7:7)` + 150 `units_on(smr,1:6)` - 170 `units_on(smr,7:12)` <= 0 -- $b_k =$ `8:8`: -1 `flow(smr,demand,7:7)` + 1 `flow(smr,demand,8:8)` - 20 `units_on(smr,7:12)` <= 0 -- $b_k =$ `9:9`: -1 `flow(smr,demand,8:8)` + 1 `flow(smr,demand,9:9)` - 20 `units_on(smr,7:12)` <= 0 +- $b_k =$ `2:2`: -1 `flow(smr,demand,1:1)` + 1 `flow(smr,demand,2:2)` - 20 `units_on(smr,1:6)` <= 0 +- $b_k =$ `3:3`: -1 `flow(smr,demand,2:2)` + 1 `flow(smr,demand,3:3)` - 20 `units_on(smr,1:6)` <= 0 +- $b_k =$ `4:4`: -1 `flow(smr,demand,3:3)` + 1 `flow(smr,demand,4:4)` - 20 `units_on(smr,1:6)` <= 0 +- $b_k =$ `5:5`: -1 `flow(smr,demand,4:4)` + 1 `flow(smr,demand,5:5)` - 20 `units_on(smr,1:6)` <= 0 +- $b_k =$ `6:6`: -1 `flow(smr,demand,5:5)` + 1 `flow(smr,demand,6:6)` - 20 `units_on(smr,1:6)` <= 0 +- $b_k =$ `7:7`: -1 `flow(smr,demand,6:6)` + 1 `flow(smr,demand,7:7)` + 150 `units_on(smr,1:6)` - 170 `units_on(smr,7:12)` <= 0 +- $b_k =$ `8:8`: -1 `flow(smr,demand,7:7)` + 1 `flow(smr,demand,8:8)` - 20 `units_on(smr,7:12)` <= 0 +- $b_k =$ `9:9`: -1 `flow(smr,demand,8:8)` + 1 `flow(smr,demand,9:9)` - 20 `units_on(smr,7:12)` <= 0  For the maximum ramp down we have similar constraints as the ones shown above.  @@ -490,31 +490,32 @@ Let's now take a look at the resulting constraints in the model.  `limit_units_on(ccgt)`: The `units_on` variables are defined every 3h; therefore, the upper bound of the variable is also every 3h. In addition, the `ccgt` is investable and has one existing unit that limits the commitment variables.  -- $b_k =$ `1:3`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,1:3)` <= 1 -- $b_k =$ `4:6`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,4:6)` <= 1 -- $b_k =$ `7:9`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,7:9)` <= 1 +- $b_k =$ `1:3`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,1:3)` <= 1 +- $b_k =$ `4:6`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,4:6)` <= 1 +- $b_k =$ `7:9`: -1 `assets_investment(ccgt)` + 1 `units_on(ccgt,7:9)` <= 1  `min_output_flow(ccgt)`: The minimum operating point is 50 MW, so the asset must produce an output flow greater than this value when the unit is online. Here, we can see the impact of the constraints of having different temporal resolutions that are not multiple of each other. For instance, the constraint is defined for all the intersections, so `1:2`, `3:3`, `4:4`, `5:6`, etc., to ensure that the minimum operating point is correctly defined considering all the timestep blocks of the `flow` and the `units_on` variables.
-- $b_k =$ `1:2`: 1 `flow(ccgt,demand,1:2)` - 50 `units_on(ccgt,1:3)` >= 0 -- $b_k =$ `3:3`: 1 `flow(ccgt,demand,3:4)` - 50 `units_on(ccgt,1:3)` >= 0 -- $b_k =$ `4:4`: 1 `flow(ccgt,demand,3:4)` - 50 `units_on(ccgt,4:6)` >= 0 -- $b_k =$ `5:6`: 1 `flow(ccgt,demand,5:6)` - 50 `units_on(ccgt,4:6)` >= 0 +- $b_k =$ `1:2`: 1 `flow(ccgt,demand,1:2)` - 50 `units_on(ccgt,1:3)` >= 0 +- $b_k =$ `3:3`: 1 `flow(ccgt,demand,3:4)` - 50 `units_on(ccgt,1:3)` >= 0 +- $b_k =$ `4:4`: 1 `flow(ccgt,demand,3:4)` - 50 `units_on(ccgt,4:6)` >= 0 +- $b_k =$ `5:6`: 1 `flow(ccgt,demand,5:6)` - 50 `units_on(ccgt,4:6)` >= 0 `max_output_flows(ccgt)`: The capacity is 200 MW, so the asset must produce an output flow lower than this value when the unit is online. The situation is similar as in the minimum operating point constraint, we have constraints for all the intersections of the resolutions to ensure the correct definition of the maximum capacity. -- $b_k =$ `1:2`: 1 `flow(ccgt,demand,1:2)` - 200 `units_on(ccgt,1:3)` <= 0 -- $b_k =$ `3:3`: 1 `flow(ccgt,demand,3:4)` - 200 `units_on(ccgt,1:3)` <= 0 -- $b_k =$ `4:4`: 1 `flow(ccgt,demand,3:4)` - 200 `units_on(ccgt,4:6)` <= 0 -- $b_k =$ `5:6`: 1 `flow(ccgt,demand,5:6)` - 200 `units_on(ccgt,4:6)` <= 0 +- $b_k =$ `1:2`: 1 `flow(ccgt,demand,1:2)` - 200 `units_on(ccgt,1:3)` <= 0 +- $b_k =$ `3:3`: 1 `flow(ccgt,demand,3:4)` - 200 `units_on(ccgt,1:3)` <= 0 +- $b_k =$ `4:4`: 1 `flow(ccgt,demand,3:4)` - 200 `units_on(ccgt,4:6)` <= 0 +- $b_k =$ `5:6`: 1 `flow(ccgt,demand,5:6)` - 200 `units_on(ccgt,4:6)` <= 0 -`max_ramp_up(ccgt)`: The ramping capacity is 120MW, so the change in the output flow above the minimum operating point needs to be below that value when the asset is online. When the time resolutions of the flow and `units_on` are not multiples of each other, we encounter some counterintuitive constraints. For example, consider the constraint at timestep block `4:4`. This constraint only involves `units_on` variables because the flow above the minimum operating point at timestep block `4:4` differs from the previous timestep block `3:3` only in terms of the `units_on` variables. As a result, the ramping-up constraint establishes a relationship between the `units_on` variable at `1:3` and `4:6`. This means that if the unit is on at timestep `1:3`, then it must also be on at timestep `4:6`. However, this is redundant because there is already a flow variable defined for `3:4` that ensures this, thanks to the minimum operating point and maximum capacity constraints. Therefore, although this constraint is not incorrect, it is unnecessary due to the flexible time resolutions that are not multiples of each other. +`max_ramp_up(ccgt)`: The ramping capacity is 120MW, so the change in the output flow above the minimum operating point needs to be below that value when the asset is online. When the time resolutions of the flow and `units_on` are not multiples of each other, we encounter some counterintuitive constraints. For example, consider the constraint at timestep block `4:4`. This constraint only involves `units_on` variables because the flow above the minimum operating point at timestep block `4:4` differs from the previous timestep block `3:3` only in terms of the `units_on` variables. As a result, the ramping-up constraint establishes a relationship between the `units_on` variable at `1:3` and `4:6`. +This means that if the unit is on at timestep `1:3`, then it must also be on at timestep `4:6`. 
However, this is redundant because there is already a flow variable defined for `3:4` that ensures this, thanks to the minimum operating point and maximum capacity constraints. Therefore, although this constraint is not incorrect, it is unnecessary due to the flexible time resolutions that are not multiples of each other. -- $b_k =$ `3:3`: -1 `flow(ccgt,demand,1:2)` + 1 `flow(ccgt,demand,3:4)` - 120 `units_on(ccgt,1:3)` <= 0 -- $b_k =$ `4:4`: 50 `units_on(ccgt,1:3)` - 170 `units_on(ccgt,4:6)` <= 0 -- $b_k =$ `5:6`: -1 `flow(ccgt,demand,3:4)` + 1 `flow(ccgt,demand,5:6)` - 120 `units_on(ccgt,4:6)` <= 0 -- $b_k =$ `7:8`: -1 `flow(ccgt,demand,5:6)` + 1 `flow(ccgt,demand,7:8)` + 50 `units_on(ccgt,4:6)` - 170 `units_on(ccgt,7:9)` <= 0 -- $b_k =$ `9:9`: -1 `flow(ccgt,demand,7:8)` + 1 `flow(ccgt,demand,9:10)` - 120 `units_on(ccgt,7:9)` <= 0 +- $b_k =$ `3:3`: -1 `flow(ccgt,demand,1:2)` + 1 `flow(ccgt,demand,3:4)` - 120 `units_on(ccgt,1:3)` <= 0 +- $b_k =$ `4:4`: 50 `units_on(ccgt,1:3)` - 170 `units_on(ccgt,4:6)` <= 0 +- $b_k =$ `5:6`: -1 `flow(ccgt,demand,3:4)` + 1 `flow(ccgt,demand,5:6)` - 120 `units_on(ccgt,4:6)` <= 0 +- $b_k =$ `7:8`: -1 `flow(ccgt,demand,5:6)` + 1 `flow(ccgt,demand,7:8)` + 50 `units_on(ccgt,4:6)` - 170 `units_on(ccgt,7:9)` <= 0 +- $b_k =$ `9:9`: -1 `flow(ccgt,demand,7:8)` + 1 `flow(ccgt,demand,9:10)` - 120 `units_on(ccgt,7:9)` <= 0 For the maximum ramp down we have similar constraints to the ones shown above. @@ -538,8 +539,8 @@ Energy storage systems can be broadly classified into two categories: seasonal a Both storage categories can be represented in _TulipaEnergyModel.jl_ using the representative periods approach: -- _Non-seasonal storage_: When the storage capacity of an asset is lower than the total length of representative periods, like in the case of a battery with a storage capacity of 4 hours and representative periods of 24-hour timesteps, intra-temporal constraints should be applied. -- _Seasonal storage_: When the storage capacity of an asset is greater than the total length of representative periods, like in the case of a hydroplant with a storage capacity of a month and representative periods of 24-hour timesteps, inter-temporal constraints should be applied. +- _Non-seasonal storage_: When the storage capacity of an asset is lower than the total length of representative periods, like in the case of a battery with a storage capacity of 4 hours and representative periods of 24-hour timesteps, intra-temporal constraints should be applied. +- _Seasonal storage_: When the storage capacity of an asset is greater than the total length of representative periods, like in the case of a hydroplant with a storage capacity of a month and representative periods of 24-hour timesteps, inter-temporal constraints should be applied. The equations of intra- and inter-temporal constraints for energy storage are available in the [`mathematical formulation`](@ref formulation). An example is shown in the following section to explain these concepts. In addition, the section [`seasonal and non-seasonal storage setup`](@ref seasonal-setup) shows how to set the parameters in the model to consider each type in the storage assets.
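To make the seasonal/non-seasonal distinction above concrete, here is a minimal, hypothetical Julia sketch of the classification rule. The type and function names (`StorageSketch`, `constraint_family`) are illustrative only and are not part of the TulipaEnergyModel.jl API; the rule itself is the one stated in the bullets above, comparing the asset's storage capacity against the total length of a representative period.

```julia
# Hypothetical helper, not part of the package API: pick the constraint family
# for a storage asset following the rule described above.
struct StorageSketch
    name::String
    storage_capacity_h::Float64  # energy capacity expressed in hours at rated power
end

# Inter-temporal (seasonal) constraints when the storage capacity exceeds the
# representative-period length; intra-temporal (non-seasonal) otherwise.
function constraint_family(asset::StorageSketch, rep_period_length_h::Real)
    return asset.storage_capacity_h > rep_period_length_h ? :inter_temporal : :intra_temporal
end

battery = StorageSketch("battery", 4.0)      # 4 hours of storage
hydro   = StorageSketch("hydro", 30 * 24.0)  # roughly one month of storage

constraint_family(battery, 24.0)  # :intra_temporal (non-seasonal)
constraint_family(hydro, 24.0)    # :inter_temporal (seasonal)
```

In the model itself this choice is driven by the asset's input data, as described in the seasonal and non-seasonal storage setup section; the sketch only restates the decision rule.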
diff --git a/docs/src/formulation.md b/docs/src/40-formulation.md similarity index 97% rename from docs/src/formulation.md rename to docs/src/40-formulation.md index ff1a0dad..163f7bfa 100644 --- a/docs/src/formulation.md +++ b/docs/src/40-formulation.md @@ -329,13 +329,13 @@ There are two types of constraints for energy storage assets: intra-temporal and In addition, we define the following expression to determine the energy investment limit of the storage assets. This expression takes two different forms depending on whether the storage asset belongs to the set $\mathcal{A}^{\text{se}}$ or not. -- Investment energy method: +- Investment energy method: ```math e^{\text{energy inv limit}}_{a} = p^{\text{energy capacity}}_a \cdot v^{\text{inv energy}}_{a} \quad \forall a \in \mathcal{A}^{\text{i}} \cap \mathcal{A}^{\text{se}} ``` -- Fixed energy-to-power ratio method: +- Fixed energy-to-power ratio method: ```math e^{\text{energy inv limit}}_{a} = p^{\text{energy to power ratio}}_a \cdot p^{\text{capacity}}_a \cdot v^{\text{inv}}_{a} \quad \forall a \in \mathcal{A}^{\text{i}} \cap (\mathcal{A}^{\text{s}} \setminus \mathcal{A}^{\text{se}}) @@ -366,7 +366,7 @@ v^{\text{intra-storage}}_{a,k,b_k} \geq p^{\text{min intra level}}_{a,k,b_k} \cd The cycling constraint for the intra-temporal constraints links the first timestep block ($b^{\text{first}}_k$) and the last one ($b^{\text{last}}_k$) in each representative period. The parameter $p^{\text{init storage level}}_{a}$ determines the considered equations in the model for this constraint: -- If parameter $p^{\text{init storage level}}_{a}$ is not defined, the intra-storage level of the last timestep block ($b^{\text{last}}_k$) is used as the initial value for the first timestep block in the [intra-temporal constraint for the storage balance](@ref intra-storage-balance). +- If parameter $p^{\text{init storage level}}_{a}$ is not defined, the intra-storage level of the last timestep block ($b^{\text{last}}_k$) is used as the initial value for the first timestep block in the [intra-temporal constraint for the storage balance](@ref intra-storage-balance). ```math \begin{aligned} @@ -375,7 +375,7 @@ v^{\text{intra-storage}}_{a,k,b^{\text{first}}_k} = v^{\text{intra-storage}}_{a, \end{aligned} ``` -- If parameter $p^{\text{init storage level}}_{a}$ is defined, we use it as the initial value for the first timestep block in the [intra-temporal constraint for the storage balance](@ref intra-storage-balance). In addition, the intra-storage level of the last timestep block ($b^{\text{last}}_k$) in each representative period must be greater than this initial value. +- If parameter $p^{\text{init storage level}}_{a}$ is defined, we use it as the initial value for the first timestep block in the [intra-temporal constraint for the storage balance](@ref intra-storage-balance). In addition, the intra-storage level of the last timestep block ($b^{\text{last}}_k$) in each representative period must be greater than this initial value. ```math \begin{aligned} @@ -420,7 +420,7 @@ v^{\text{inter-storage}}_{a,p} \geq p^{\text{min inter level}}_{a,p} \cdot (p^{\ The cycling constraint for the inter-temporal constraints links the first-period block ($p^{\text{first}}$) and the last one ($p^{\text{last}}$) in the timeframe. 
The parameter $p^{\text{init storage level}}_{a}$ determines the considered equations in the model for this constraint: -- If parameter $p^{\text{init storage level}}_{a}$ is not defined, the inter-storage level of the last period block ($p^{\text{last}}$) is used as the initial value for the first-period block in the [inter-temporal constraint for the storage balance](@ref inter-storage-balance). +- If parameter $p^{\text{init storage level}}_{a}$ is not defined, the inter-storage level of the last period block ($p^{\text{last}}$) is used as the initial value for the first-period block in the [inter-temporal constraint for the storage balance](@ref inter-storage-balance). ```math \begin{aligned} @@ -431,7 +431,7 @@ v^{\text{inter-storage}}_{a,p^{\text{first}}} = & v^{\text{inter-storage}}_{a,p^ \end{aligned} ``` -- If parameter $p^{\text{init storage level}}_{a}$ is defined, we use it as the initial value for the first-period block in the [inter-temporal constraint for the storage balance](@ref inter-storage-balance). In addition, the inter-storage level of the last period block ($p^{\text{last}}$) in the timeframe must be greater than this initial value. +- If parameter $p^{\text{init storage level}}_{a}$ is defined, we use it as the initial value for the first-period block in the [inter-temporal constraint for the storage balance](@ref inter-storage-balance). In addition, the inter-storage level of the last period block ($p^{\text{last}}$) in the timeframe must be greater than this initial value. ```math \begin{aligned} diff --git a/docs/src/90-contributing.md b/docs/src/90-contributing.md new file mode 100644 index 00000000..fbce7cd4 --- /dev/null +++ b/docs/src/90-contributing.md @@ -0,0 +1,30 @@ +# [Contributing guidelines](@id contributing) + +Great that you want to contribute to the development of Tulipa! Please read these guidelines and our [developer documentation](91-developer.md) to get you started. + +## GitHub Rules of Engagement + +- If you want to discuss something that isn't immediately actionable, post under Discussions. Convert it to an issue once it's actionable. +- All PRs should have an associated issue (unless it's a very minor fix). +- All issues should have 1 Type and 1+ Zone labels (unless Type: epic). +- Assign yourself to issues you want to address. Consider if you will be able to work on them in the near future (this week); if not, leave them available for someone else. +- Set the issue Status to "In Progress" when you have started working on it. +- When finalizing a pull request, set the Status to "Ready for Review." If someone specific needs to review it, assign them as the reviewer (otherwise anyone can review). +- Issues addressed by merged PRs will automatically move to Done. +- If you want to discuss an issue at the next group meeting (or just get some attention), mark it with the "question" label. +- Issues without updates for 60 days (and PRs without updates in 30 days) will be labelled as "stale" and filtered out of view. There is a Stale project board to view and revive these. + +## Contributing Workflow + +Fork → Branch → Code → Push → Pull → Squash & Merge + +1. Fork the repository +2. Create a new branch (in your fork) +3. Do fantastic coding +4. Push to your fork +5. Create a pull request from your fork to the main repository +6. (After review) Squash and merge + +For a step-by-step guide to these steps, see our [developer documentation](91-developer.md).
+ +We use this workflow in our quest to achieve the [Utopic Git History](https://blog.esciencecenter.nl/the-utopic-git-history-d44b81c09593). diff --git a/README.dev.md b/docs/src/91-developer.md similarity index 73% rename from README.dev.md rename to docs/src/91-developer.md index ec762386..fd40564a 100644 --- a/README.dev.md +++ b/docs/src/91-developer.md @@ -16,14 +16,14 @@ once. To contribute to TulipaEnergyModel.jl, you need the following: -1. [Julia](https://julialang.org) programming language. -2. [Git](https://git-scm.com) for version control. -3. [VSCode](https://code.visualstudio.com) or any other editor. For VSCode, we recommend +1. [Julia](https://julialang.org) programming language. +2. [Git](https://git-scm.com) for version control. +3. [VSCode](https://code.visualstudio.com) or any other editor. For VSCode, we recommend installing a few extensions. You can do it by pressing <kbd>Ctrl</kbd> + <kbd>Shift</kbd> + <kbd>X</kbd> (or <kbd>⇧</kbd> + <kbd>⌘</kbd> + <kbd>X</kbd> on MacOS) and searching by the extension name. - [Julia for Visual Studio Code](https://www.julia-vscode.org); - [Git Graph](https://marketplace.visualstudio.com/items?itemName=mhutchie.git-graph). -4. [EditorConfig](https://editorconfig.org) for consistent code formatting. +4. [EditorConfig](https://editorconfig.org) for consistent code formatting. In VSCode, it is available as [an extension](https://marketplace.visualstudio.com/items?itemName=EditorConfig.EditorConfig). -5. [pre-commit](https://pre-commit.com) to run the linters and formatters. +5. [pre-commit](https://pre-commit.com) to run the linters and formatters. You can install `pre-commit` globally using @@ -45,9 +45,9 @@ To contribute to TulipaEnergyModel.jl, you need the following: env/Scripts/activate ``` - Note that there is no leading dot (`. `) in the above command. + Note that there is no leading dot (`.`) in the above command. -6. [JuliaFormatter.jl](https://github.com/domluna/JuliaFormatter.jl) for code +6. [JuliaFormatter.jl](https://github.com/domluna/JuliaFormatter.jl) for code formatting. To install it, open Julia REPL, for example, by typing in the @@ -71,28 +71,28 @@ To contribute to TulipaEnergyModel.jl, you need the following: open VSCode Settings (<kbd>Ctrl</kbd> + <kbd>,</kbd>), then in "Search Settings", type "Format on Save" and tick the first result: - ![Screenshot of Format on Save option](docs/images/FormatOnSave.png) + ![Screenshot of Format on Save option](./images/FormatOnSave.png) -7. [Prettier](https://prettier.io/) for markdown formatting. +7. [Prettier](https://prettier.io/) for markdown formatting. In VSCode, it is available as [an extension](https://marketplace.visualstudio.com/items?itemName=esbenp.prettier-vscode). Having enabled "Format on Save" for `JuliaFormatter` in the previous step will also enable "Format on Save" for `Prettier`, provided that `Prettier` is set as the default formatter for markdown files. To do so, in VSCode, open any markdown file, right-click on any area of the file, choose "Format Document With...", click "Configure Default Formatter..." situated at the bottom of the drop-down list at the top of the screen, and then choose `Prettier - Code formatter` as the default formatter. Once you are done, you can double-check it by again right-clicking on any area of the file and choosing "Format Document With...", and you should see `Prettier - Code formatter (default)`. -8. [LocalCoverage](https://github.com/JuliaCI/LocalCoverage.jl) for coverage +8.
[LocalCoverage](https://github.com/JuliaCI/LocalCoverage.jl) for coverage testing. You can install it the same way you installed `JuliaFormatter`, that is, by opening Julia REPL in the package mode and typing: - ```julia - pkg> activate - pkg> add LocalCoverage - ``` + ```julia + pkg> activate + pkg> add LocalCoverage + ``` ### Forking the Repository Any changes should be done in a [fork](https://docs.github.com/en/get-started/quickstart/fork-a-repo). You can fork this repository directly on GitHub: -![Screenshot of Fork button on GitHub](docs/images/Fork.png) +![Screenshot of Fork button on GitHub](./images/Fork.png) After that, clone your fork and add this repository as upstream: @@ -108,7 +108,7 @@ git remote -v ``` You should see something similar to: -![Screenshot of remote names, showing origin and upstream](docs/images/Remotes.png) +![Screenshot of remote names, showing origin and upstream](./images/Remotes.png) If your names are wrong, use this command (with the relevant names) to correct it: @@ -181,18 +181,18 @@ Now, you can only commit if all the pre-commit tests pass. ## Code format and guidelines -This section will list the guidelines for code formatting **not enforced** by [JuliaFormatter](.JuliaFormatter.toml). +This section will list the guidelines for code formatting **not enforced** by JuliaFormatter. We will try to follow these during development and reviews. -- Naming - - `CamelCase` for classes and modules, - - `snake_case` for functions and variables, and - - `kebab-case` for file names. -- Use `using` instead of `import`, in the following way: - - Don't use pure `using Package`, always list all necessary objects with `using Package: A, B, C`. - - List obvious objects, e.g., `using JuMP: @variable`, since `@variable` is obviously from JuMP in this context, or `using Graph: SimpleDiGraph`, because it's a constructor with an obvious name. - - For other objects inside `Package`, use `using Package: Package` and explicitly call `Package.A` to use it, e.g., `DataFrames.groupby`. - - List all `using` in <src/TulipaEnergyModel.jl>. +- Naming + - `CamelCase` for classes and modules, + - `snake_case` for functions and variables, and + - `kebab-case` for file names. +- Use `using` instead of `import`, in the following way: + - Don't use pure `using Package`, always list all necessary objects with `using Package: A, B, C`. + - List obvious objects, e.g., `using JuMP: @variable`, since `@variable` is obviously from JuMP in this context, or `using Graph: SimpleDiGraph`, because it's a constructor with an obvious name. + - For other objects inside `Package`, use `using Package: Package` and explicitly call `Package.A` to use it, e.g., `DataFrames.groupby`. + - List all `using` in <src/TulipaEnergyModel.jl>. ## Contributing Workflow @@ -239,10 +239,10 @@ Create a branch to address the issue: git switch -c <branch_name> ``` -- If there is an associated issue, add the issue number to the branch name, +- If there is an associated issue, add the issue number to the branch name, for example, `123-short-description` for issue \#123. -- If there is no associated issue **and the changes are small**, add a prefix such as "typo", "hotfix", "small-refactor", according to the type of update. -- If the changes are not small and there is no associated issue, then create the issue first, so we can properly discuss the changes. +- If there is no associated issue **and the changes are small**, add a prefix such as "typo", "hotfix", "small-refactor", according to the type of update. 
+- If the changes are not small and there is no associated issue, then create the issue first, so we can properly discuss the changes. > **Note:** > Always branch from `main`, i.e., the main branch of your own fork. @@ -300,9 +300,9 @@ git push -u origin <branch_name> When writing the commit message: -- use imperative, present tense (Add feature, Fix bug); -- have informative titles; -- if necessary, add a body with details. +- use imperative, present tense (Add feature, Fix bug); +- have informative titles; +- if necessary, add a body with details. > **Note:** > Try to create "atomic git commits". Read @@ -350,7 +350,7 @@ When there are no more conflicts and all the tests are passing, create a pull request to merge your remote branch into the org main. You can do this on GitHub by opening the branch in your fork and clicking "Compare & pull request". -![Screenshot of Compare & pull request button on GitHub](docs/images/CompareAndPR.png) +![Screenshot of Compare & pull request button on GitHub](./images/CompareAndPR.png) Fill in the pull request details: @@ -360,7 +360,7 @@ Fill in the pull request details: 4. (Optional) Choose a reviewer. 5. When all of the information is filled in, click "Create pull request". -![Screenshot of the pull request information](docs/images/PRInfo.png) +![Screenshot of the pull request information](./images/PRInfo.png) Your pull request will appear in the list of pull requests in the TulipaEnergyModel.jl repository, where you can track the review process. @@ -376,29 +376,21 @@ For branches that were pushed to the main repo, it is recommended that you do so ## Building the Documentation Locally -To build and view the documentation locally, first, navigate to the `docs` folder -in your file explorer and open a terminal. Then, run `julia --project`. With the -`julia` open, enter the `pkg` mode by pressing `]`. -Check that the environment name is `docs`. The first time here, you have to run: - -```julia-pkg -docs> dev .. -docs> update -``` +Following the latest suggestions, we recommend using `LiveServer` to build the documentation. > **Note**: -> If you intend to rerun the build step, ensure you have the package `Revise` -> installed in your global environment, and run `using Revise` before including -> `make.jl`. Alternatively, close `julia` and reopen it. - -Then, to build the documentation, run in Julia: +> Ensure you have the package `Revise` installed in your global environment +> before running `servedocs`. -```julia -julia> include("make.jl") -``` +Here is how you do it: -After building, the documentation will be available in the folder `docs/build/`. -Open the `index.html` file on the browser to see it. +1. Run `julia --project=docs` in the package root to open Julia in the environment of the docs. +1. If this is the first time building the docs + 1. Press `]` to enter `pkg` mode + 1. Run `pkg> dev .` to use the development version of your package + 1. Press backspace to leave `pkg` mode +1. Run `julia> using LiveServer` +1. Run `julia> servedocs()` ## Performance Considerations @@ -414,11 +406,11 @@ post the results as a comment in your pull request.
If you want to manually run the benchmarks, you can do the following: -- Navigate to the benchmark folder -- Run `julia --project=.` -- Enter `pkg` mode by pressing `]` -- Run `dev ..` to add the development version of TulipaEnergyModel -- Now run +- Navigate to the benchmark folder +- Run `julia --project=.` +- Enter `pkg` mode by pressing `]` +- Run `dev ..` to add the development version of TulipaEnergyModel +- Now run ```julia include("benchmarks.jl") @@ -430,12 +422,12 @@ If you want to manually run the benchmarks, you can do the following: To profile the code in a more manual way, here are some tips: -- Wrap your code into functions. -- Call the function once to precompile it. This must be done after every change to the function. -- Prefix the function call with `@time`. This is the most basic timing, part of Julia. -- Prefix the function call with `@btime`. This is part of the BenchmarkTools package, which you might need to install. `@btime` will evaluate the function a few times to give a better estimate. -- Prefix the function call with `@benchmark`. Also part of BenchmarkTools. This will produce a nice histogram of the times and give more information. `@btime` and `@benchmark` do the same thing in the background. -- Call `@profview`. This needs to be done in VSCode, or using the ProfileView package. This will create a flame graph, where each function call is a block. The size of the block is proportional to the aggregate time it takes to run. The blocks below a block are functions called inside the function above. +- Wrap your code into functions. +- Call the function once to precompile it. This must be done after every change to the function. +- Prefix the function call with `@time`. This is the most basic timing, part of Julia. +- Prefix the function call with `@btime`. This is part of the BenchmarkTools package, which you might need to install. `@btime` will evaluate the function a few times to give a better estimate. +- Prefix the function call with `@benchmark`. Also part of BenchmarkTools. This will produce a nice histogram of the times and give more information. `@btime` and `@benchmark` do the same thing in the background. +- Call `@profview`. This needs to be done in VSCode, or using the ProfileView package. This will create a flame graph, where each function call is a block. The size of the block is proportional to the aggregate time it takes to run. The blocks below a block are functions called inside the function above. See the file <benchmark/profiling.jl> for an example of profiling code. @@ -445,29 +437,29 @@ When publishing a new version of the model to the Julia Registry, follow this pr > **Note:** > To be able to register, you need to be a member of the organisation TulipaEnergy and have your visibility set to public: -> ![Screenshot of public members of TulipaEnergy on GitHub](docs/images/PublicMember.png) +> ![Screenshot of public members of TulipaEnergy on GitHub](./images/PublicMember.png) 1. Click on the `Project.toml` file on GitHub. 2. Edit the file and change the version number according to [semantic versioning](https://semver.org/): Major.Minor.Patch - ![Screenshot of editing Project.toml on GitHub](docs/images/UpdateVersion.png) + ![Screenshot of editing Project.toml on GitHub](./images/UpdateVersion.png) 3. Commit the changes in a new branch and open a pull request. Change the commit message according to the version number. 
- ![Screenshot of PR with commit message "Release 0.6.1"](docs/images/CommitMessage.png) + ![Screenshot of PR with commit message "Release 0.6.1"](./images/CommitMessage.png) 4. Create the pull request and squash & merge it after the review and testing process. Delete the branch after the squash and merge. - ![Screenshot of full PR template on GitHub](docs/images/PullRequest.png) + ![Screenshot of full PR template on GitHub](./images/PullRequest.png) 5. Go to the main page of the repo and click on the commit. - ![Screenshot of how to access commit on GitHub](docs/images/AccessCommit.png) + ![Screenshot of how to access commit on GitHub](./images/AccessCommit.png) 6. Add the following comment to the commit: `@JuliaRegistrator register` - ![Screenshot of calling JuliaRegistrator in commit comments](docs/images/JuliaRegistrator.png) + ![Screenshot of calling JuliaRegistrator in commit comments](./images/JuliaRegistrator.png) 7. The bot should start the registration process. - ![Screenshot of JuliaRegistrator bot message](docs/images/BotProcess.png) + ![Screenshot of JuliaRegistrator bot message](./images/BotProcess.png) 8. After approval, the bot will take care of the PR at the Julia Registry and automatically create the release for the new version. - ![Screenshot of new version on registry](docs/images/NewRelease.png) + ![Screenshot of new version on registry](./images/NewRelease.png) Thank you for helping make frequent releases! diff --git a/docs/src/95-reference.md b/docs/src/95-reference.md new file mode 100644 index 00000000..f8a262ca --- /dev/null +++ b/docs/src/95-reference.md @@ -0,0 +1,9 @@ +# [Reference](@id reference) + +```@index +Pages = ["95-reference.md"] +``` + +```@autodocs +Modules = [TulipaEnergyModel] +``` diff --git a/docs/src/api.md b/docs/src/api.md deleted file mode 100644 index d5e6069d..00000000 --- a/docs/src/api.md +++ /dev/null @@ -1,4 +0,0 @@ -# API - -Coming Soon\ -In the meantime, see [How to Use](@ref how-to-use) and [Reference](@ref reference).
diff --git a/docs/images/AccessCommit.png b/docs/src/images/AccessCommit.png similarity index 100% rename from docs/images/AccessCommit.png rename to docs/src/images/AccessCommit.png diff --git a/docs/images/BotProcess.png b/docs/src/images/BotProcess.png similarity index 100% rename from docs/images/BotProcess.png rename to docs/src/images/BotProcess.png diff --git a/docs/images/CommitMessage.png b/docs/src/images/CommitMessage.png similarity index 100% rename from docs/images/CommitMessage.png rename to docs/src/images/CommitMessage.png diff --git a/docs/images/CompareAndPR.png b/docs/src/images/CompareAndPR.png similarity index 100% rename from docs/images/CompareAndPR.png rename to docs/src/images/CompareAndPR.png diff --git a/docs/images/Fork.png b/docs/src/images/Fork.png similarity index 100% rename from docs/images/Fork.png rename to docs/src/images/Fork.png diff --git a/docs/images/FormatOnSave.png b/docs/src/images/FormatOnSave.png similarity index 100% rename from docs/images/FormatOnSave.png rename to docs/src/images/FormatOnSave.png diff --git a/docs/images/JuliaRegistrator.png b/docs/src/images/JuliaRegistrator.png similarity index 100% rename from docs/images/JuliaRegistrator.png rename to docs/src/images/JuliaRegistrator.png diff --git a/docs/images/NewRelease.png b/docs/src/images/NewRelease.png similarity index 100% rename from docs/images/NewRelease.png rename to docs/src/images/NewRelease.png diff --git a/docs/images/PRInfo.png b/docs/src/images/PRInfo.png similarity index 100% rename from docs/images/PRInfo.png rename to docs/src/images/PRInfo.png diff --git a/docs/images/PublicMember.png b/docs/src/images/PublicMember.png similarity index 100% rename from docs/images/PublicMember.png rename to docs/src/images/PublicMember.png diff --git a/docs/images/PullRequest.png b/docs/src/images/PullRequest.png similarity index 100% rename from docs/images/PullRequest.png rename to docs/src/images/PullRequest.png diff --git a/docs/images/Remotes.png b/docs/src/images/Remotes.png similarity index 100% rename from docs/images/Remotes.png rename to docs/src/images/Remotes.png diff --git a/docs/images/UpdateVersion.png b/docs/src/images/UpdateVersion.png similarity index 100% rename from docs/images/UpdateVersion.png rename to docs/src/images/UpdateVersion.png diff --git a/docs/src/index.md b/docs/src/index.md index 209f3cdc..a2f8c768 100644 --- a/docs/src/index.md +++ b/docs/src/index.md @@ -27,3 +27,16 @@ Start a discussion [here](https://github.com/TulipaEnergy/TulipaEnergyModel.jl/d ```@contents Pages = ["index.md","features.md","formulation.md","how-to-use.md","tutorials.md","api.md","reference.md"] ``` + +## Contributors + +```@raw html +<!-- ALL-CONTRIBUTORS-LIST:START - Do not remove or modify this section --> +<!-- prettier-ignore-start --> +<!-- markdownlint-disable --> + +<!-- markdownlint-restore --> +<!-- prettier-ignore-end --> + +<!-- ALL-CONTRIBUTORS-LIST:END --> +``` diff --git a/docs/src/reference.md b/docs/src/reference.md deleted file mode 100644 index 584ea756..00000000 --- a/docs/src/reference.md +++ /dev/null @@ -1,16 +0,0 @@ -# [Reference](@id reference) - -​This is a comprehensive list of all of the functions in the model.\ -The function most pertinent to the user is: [`run_scenario`](@ref) - -## Index - -```@index -Pages = ["reference.md"] -``` - -## Contents - -```@autodocs -Modules = [TulipaEnergyModel] -``` diff --git a/test/runtests.jl b/test/runtests.jl index 1429d14d..d9379ef0 100644 --- a/test/runtests.jl +++ b/test/runtests.jl @@ -19,10 +19,23 @@ if 
!isdir(OUTPUT_FOLDER) mkdir(OUTPUT_FOLDER) end -# Run all files in test folder starting with `test-` and ending with `.jl` -test_files = filter(file -> startswith("test-")(file) && endswith(".jl")(file), readdir(@__DIR__)) -for file in test_files - include(file) +#= +Don't add your tests to runtests.jl. Instead, create files named + + test-title-for-my-test.jl + +The file will be automatically included inside a `@testset` with title "Title For My Test". +=# +for (root, dirs, files) in walkdir(@__DIR__) + for file in files + if isnothing(match(r"^test-.*\.jl$", file)) + continue + end + title = titlecase(replace(splitext(file[6:end])[1], "-" => " ")) + @testset "$title" begin + include(file) + end + end end # Other general tests that don't need their own file
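For illustration, here is what a hypothetical test file picked up by the loop above could look like. The file name `test-storage-balance.jl` and its contents are made up for this example; the only real conventions are the `test-` prefix, the `.jl` extension, and the fact that `runtests.jl` wraps the included file in a `@testset` whose title is derived from the file name (here, "Storage Balance").

```julia
# test/test-storage-balance.jl (hypothetical example)
#
# No `@testset` wrapper is needed at the top level of this file: runtests.jl
# already includes it inside `@testset "Storage Balance"`.
using Test  # already loaded by runtests.jl; repeated here so the file runs on its own

@test 1 + 1 == 2                              # placeholder assertions
@test isapprox(0.1 + 0.2, 0.3; atol = 1e-12)
```

Running the test suite as usual (for example, `pkg> test` in the package environment) would then report these assertions under the "Storage Balance" test set.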