diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
new file mode 100644
index 0000000..a23edb0
--- /dev/null
+++ b/.github/workflows/docs.yml
@@ -0,0 +1,27 @@
+# Basic ODK workflow
+name: Docs
+
+# Controls when the action will run.
+on:
+ # Allows you to run this workflow manually from the Actions tab
+ workflow_dispatch:
+ push:
+ branches:
+ - main
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+ build:
+ name: Deploy docs
+ runs-on: ubuntu-latest
+ steps:
+ - name: Checkout main
+ uses: actions/checkout@v3
+
+ - name: Deploy docs
+ uses: mhausenblas/mkdocs-deploy-gh-pages@master
+ # Or use mhausenblas/mkdocs-deploy-gh-pages@nomaterial to build without the mkdocs-material theme
+ env:
+ GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+ CONFIG_FILE: mkdocs.yaml
+
diff --git a/.github/workflows/qc.yml b/.github/workflows/qc.yml
new file mode 100644
index 0000000..2dbf92b
--- /dev/null
+++ b/.github/workflows/qc.yml
@@ -0,0 +1,33 @@
+# Basic ODK workflow
+
+name: CI
+
+# Controls when the action will run.
+on:
+ # Triggers the workflow on push or pull request events but only for the main branch
+ push:
+ branches: [ main ]
+ pull_request:
+ branches: [ main ]
+
+ # Allows you to run this workflow manually from the Actions tab
+ workflow_dispatch:
+
+# A workflow run is made up of one or more jobs that can run sequentially or in parallel
+jobs:
+ # This workflow contains a single job called "ontology_qc"
+ ontology_qc:
+ # The type of runner that the job will run on
+ runs-on: ubuntu-latest
+ container: obolibrary/odkfull:v1.4.3
+
+ # Steps represent a sequence of tasks that will be executed as part of the job
+ steps:
+ # Checks-out your repository under $GITHUB_WORKSPACE, so your job can access it
+ - uses: actions/checkout@v3
+
+ - name: Run ontology QC checks
+ env:
+ DEFAULT_BRANCH: main
+ run: cd src/ontology && make ROBOT_ENV='ROBOT_JAVA_ARGS=-Xmx6G' test IMP=false PAT=false
+
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..5fa827d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,44 @@
+.DS_Store
+semantic.cache
+bin/
+
+*.tmp
+*.tmp.obo
+*.tmp.owl
+*.tmp.json
+
+.github/token.txt
+
+src/ontology/mirror
+src/ontology/mirror/*
+src/ontology/reports/*
+src/ontology/vivo.owl
+src/ontology/vivo.obo
+src/ontology/vivo.json
+src/ontology/vivo-base.*
+src/ontology/vivo-basic.*
+src/ontology/vivo-full.*
+src/ontology/vivo-simple.*
+src/ontology/vivo-simple-non-classified.*
+
+src/ontology/seed.txt
+src/ontology/dosdp-tools.log
+src/ontology/ed_definitions_merged.owl
+src/ontology/ontologyterms.txt
+src/ontology/simple_seed.txt
+src/ontology/patterns
+src/ontology/merged-vivo-edit.owl
+
+src/ontology/target/
+src/ontology/tmp/*
+!src/ontology/tmp/README.md
+
+src/ontology/run.sh.conf
+src/ontology/run.sh.env
+
+src/ontology/imports/*_terms_combined.txt
+
+src/patterns/data/**/*.ofn
+src/patterns/data/**/*.txt
+src/patterns/pattern_owl_seed.txt
+src/patterns/all_pattern_terms.txt
\ No newline at end of file
diff --git a/.idea/.gitignore b/.idea/.gitignore
new file mode 100644
index 0000000..26d3352
--- /dev/null
+++ b/.idea/.gitignore
@@ -0,0 +1,3 @@
+# Default ignored files
+/shelf/
+/workspace.xml
diff --git a/.idea/inspectionProfiles/Project_Default.xml b/.idea/inspectionProfiles/Project_Default.xml
new file mode 100644
index 0000000..cb54fda
--- /dev/null
+++ b/.idea/inspectionProfiles/Project_Default.xml
@@ -0,0 +1,12 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/inspectionProfiles/profiles_settings.xml b/.idea/inspectionProfiles/profiles_settings.xml
new file mode 100644
index 0000000..105ce2d
--- /dev/null
+++ b/.idea/inspectionProfiles/profiles_settings.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/misc.xml b/.idea/misc.xml
new file mode 100644
index 0000000..40dd3ae
--- /dev/null
+++ b/.idea/misc.xml
@@ -0,0 +1,7 @@
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/modules.xml b/.idea/modules.xml
new file mode 100644
index 0000000..ac2f119
--- /dev/null
+++ b/.idea/modules.xml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vcs.xml b/.idea/vcs.xml
new file mode 100644
index 0000000..35eb1dd
--- /dev/null
+++ b/.idea/vcs.xml
@@ -0,0 +1,6 @@
+
+
+
+
+
+
\ No newline at end of file
diff --git a/.idea/vivo.iml b/.idea/vivo.iml
new file mode 100644
index 0000000..d0876a7
--- /dev/null
+++ b/.idea/vivo.iml
@@ -0,0 +1,8 @@
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
new file mode 100644
index 0000000..b547f39
--- /dev/null
+++ b/CODE_OF_CONDUCT.md
@@ -0,0 +1,46 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by [contacting the project team](contact.md). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This code of conduct has been derived from the excellent code of conduct of the [ATOM project](https://github.com/atom/atom/blob/master/CODE_OF_CONDUCT.md) which in turn is adapted from the [Contributor Covenant][homepage], version 1.4, available at [https://contributor-covenant.org/version/1/4][version]
+
+[homepage]: https://contributor-covenant.org
+[version]: https://contributor-covenant.org/version/1/4/
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..9e2fef4
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,95 @@
+# Contributing to The VIVO Ontology
+
+:+1: First of all: Thank you for taking the time to contribute!
+
+The following is a set of guidelines for contributing to VIVO.
+These guidelines are not strict rules. Use your best judgment, and feel free to propose
+changes to this document in a pull request.
+
+## Table Of Contents
+
+- [Code of Conduct](#code-of-conduct)
+- [Guidelines for Contributions and Requests](#contributions)
+ * [Reporting problems with the ontology](#reporting-bugs)
+ * [Requesting new terms](#requesting-terms)
+ * [Adding new terms by yourself](#adding-terms)
+- [Best practices](#best-practices)
+ * [How to write a great issue?](#great-issues)
+ * [How to create a great pull/merge request?](#great-pulls)
+
+
+
+## Code of Conduct
+
+The The VIVO Ontology team strives to create a
+welcoming environment for editors, users and other contributors.
+Please carefully read our [Code of Conduct](CODE_OF_CONDUCT.md).
+
+
+
+## Guidelines for Contributions and Requests
+
+
+
+### Reporting problems with the ontology
+
+Please use our [Issue Tracker](https://github.com/StroemPhi/vivo/issues/) for reporting problems with the ontology.
+To learn how to write a good issue [see here](#great-issues).
+
+
+
+### Requesting new terms
+
+Before you write a new request, please consider the following:
+
+- **Does the term already exist?** Before submitting suggestions for new ontology terms, check whether the term exists,
+either as a primary term or a synonym term. You can search for your term using [OLS](http://www.ebi.ac.uk/ols/ontologies/vivo).
+- **Can you provide a definition for the term?** It should be very clear what the term means, and you should be
+able to provide a concise definition, ideally with a scientific reference.
+- **Is the ontology in scope for the term?** Sometimes, it is hard to tell whether a term "belongs" in
+an ontology. A rule of thumb is "if a similar term already exists, the new term is probably in scope."
+It can be very helpful to mention a very similar concept as part of the term request!
+
+#### Who can request a term?
+
+Anyone can request new terms. However, there is no guarantee that your term will be added automatically. Since this is a
+community resource, it is often necessary to do at least some of the work of adding the term yourself, see below.
+
+#### How to write a new term request
+
+Request a new term _via_ the GitHub [Issue Tracker](https://github.com/StroemPhi/vivo/issues/).
+
+It is important to remember that it takes a lot of time for curators to process issues submitted to the tracker.
+To make this work easier, please always use issue templates if they are available (https://github.com/StroemPhi/vivo/issues/new/choose).
+
+For how to write a good term request, please read the [best practices carefully](#great-issues).
+
+
+
+### How to add a new term
+
+If you have never edited this ontology before, first follow a [general tutorial](https://oboacademy.github.io/obook/lesson/contributing-to-obo-ontologies)
+
+**Process**:
+
+1. Clone the repository (In case you are not an official team member, create a fork first)
+1. Create new branch in git, for example `git checkout -b issue123`
+1. Open src/ontology/vivo-edit.owl in your favourite editor, e.g. [Protege](https://protege.stanford.edu/). **Careful:** double check you are editing the correct file. There are many ontology files in this repository, but only one _editors file_!
+1. Perform your edit and save your changes
+1. Commit changes to branch
+1. Push changes upstream
+1. Create pull request
+
+## Best Practices
+
+
+
+### How to write great issues?
+
+Please refer to the [OBO Academy best practices](https://oboacademy.github.io/obook/lesson/term-request/).
+
+
+
+### How to create a great pull/merge request?
+
+Please refer to the [OBO Academy best practices](https://oboacademy.github.io/obook/howto/github-create-pull-request/)
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..fb912d3
--- /dev/null
+++ b/README.md
@@ -0,0 +1,29 @@
+
+![Build Status](https://github.com/StroemPhi/vivo/workflows/CI/badge.svg)
+# The VIVO Ontology
+
+Description: The VIVO Ontology is used to represent scholarship.
+
+More information can be found at http://obofoundry.org/ontology/vivo
+
+## Versions
+
+### Stable release versions
+
+The latest version of the ontology can always be found at:
+
+http://vivoweb.org/ontology/core/vivo.owl
+
+(note this will not show up until the request has been approved by obofoundry.org)
+
+### Editors' version
+
+Editors of this ontology should use the edit version, [src/ontology/vivo-edit.owl](src/ontology/vivo-edit.owl)
+
+## Contact
+
+Please use this GitHub repository's [Issue tracker](https://github.com/StroemPhi/vivo/issues) to request new terms/classes or report errors or specific concerns related to the ontology.
+
+## Acknowledgements
+
+This ontology repository was created using the [Ontology Development Kit (ODK)](https://github.com/INCATools/ontology-development-kit).
\ No newline at end of file
diff --git a/docs/cite.md b/docs/cite.md
new file mode 100644
index 0000000..39cc3bb
--- /dev/null
+++ b/docs/cite.md
@@ -0,0 +1 @@
+# How to cite VIVO
diff --git a/docs/contributing.md b/docs/contributing.md
new file mode 100644
index 0000000..036055c
--- /dev/null
+++ b/docs/contributing.md
@@ -0,0 +1 @@
+# How to contribute to VIVO
diff --git a/docs/index.md b/docs/index.md
new file mode 100644
index 0000000..8222bb9
--- /dev/null
+++ b/docs/index.md
@@ -0,0 +1,7 @@
+# VIVO Ontology Documentation
+
+[//]: # "This file is meant to be edited by the ontology maintainer."
+
+Welcome to the VIVO documentation!
+
+You can find descriptions of the standard ontology engineering workflows [here](odk-workflows/index.md).
diff --git a/docs/odk-workflows/ContinuousIntegration.md b/docs/odk-workflows/ContinuousIntegration.md
new file mode 100644
index 0000000..5f33f1d
--- /dev/null
+++ b/docs/odk-workflows/ContinuousIntegration.md
@@ -0,0 +1,25 @@
+# Introduction to Continuous Integration Workflows with ODK
+
+Historically, most repos have been using Travis CI for continuous integration testing and building, but due to
+runtime restrictions, we recently switched a lot of our repos to GitHub actions. You can set up your repo with CI by adding
+this to your configuration file (src/ontology/vivo-odk.yaml):
+
+```
+ci:
+ - github_actions
+```
+
+When [updating your repo](RepoManagement.md), you will notice a new file being added: `.github/workflows/qc.yml`.
+
+This file contains your CI logic, so if you need to change, or add anything, this is the place!
+
+Alternatively, if your repo is in GitLab instead of GitHub, you can set up your repo with GitLab CI by adding
+this to your configuration file (src/ontology/vivo-odk.yaml):
+
+```
+ci:
+ - gitlab-ci
+```
+
+This will add a file called `.gitlab-ci.yml` in the root of your repo.
+
diff --git a/docs/odk-workflows/EditorsWorkflow.md b/docs/odk-workflows/EditorsWorkflow.md
new file mode 100644
index 0000000..2d02b09
--- /dev/null
+++ b/docs/odk-workflows/EditorsWorkflow.md
@@ -0,0 +1,125 @@
+# Editors Workflow
+
+The editors workflow is one of the formal [workflows](index.md) to ensure that the ontology is developed correctly according to ontology engineering principles. There are a few different editors workflows:
+
+1. Local editing workflow: Editing the ontology in your local environment by hand, using tools such as Protégé, ROBOT templates or DOSDP patterns.
+2. Completely automated data pipeline (GitHub Actions)
+3. DROID workflow
+
+This document only covers the first editing workflow, but more will be added in the future.
+
+### Local editing workflow
+
+Workflow requirements:
+
+- git
+- github
+- docker
+- editing tool of choice, e.g. Protégé, your favourite text editor, etc
+
+#### 1. _Create issue_
+Ensure that there is a ticket on your issue tracker that describes the change you are about to make. While this seems optional, this is a very important part of the social contract of building an ontology - no change to the ontology should be performed without a good ticket, describing the motivation and nature of the intended change.
+
+#### 2. _Update main branch_
+In your local environment (e.g. your laptop), make sure you are on the `main` (prev. `master`) branch and ensure that you have all the upstream changes, for example:
+
+```
+git checkout main
+git pull
+```
+
+#### 3. _Create feature branch_
+Create a new branch. Per convention, we try to use meaningful branch names such as:
+- issue23removeprocess (where issue 23 is the related issue on GitHub)
+- issue26addcontributor
+- release20210101 (for releases)
+
+On your command line, this looks like this:
+
+```
+git checkout -b issue23removeprocess
+```
+
+#### 4. _Perform edit_
+Using your editor of choice, perform the intended edit. For example:
+
+_Protégé_
+
+1. Open `src/ontology/vivo-edit.owl` in Protégé
+2. Make the change
+3. Save the file
+
+_TextEdit_
+
+1. Open `src/ontology/vivo-edit.owl` in TextEdit (or Sublime, Atom, Vim, Nano)
+2. Make the change
+3. Save the file
+
+Consider the following when making the edit.
+
+1. According to our development philosophy, the only places that should be manually edited are:
+ - `src/ontology/vivo-edit.owl`
+ - Any ROBOT templates you chose to use (the TSV files only)
+ - Any DOSDP data tables you chose to use (the TSV files, and potentially the associated patterns)
+ - components (anything in `src/ontology/components`), see [here](RepositoryFileStructure.md).
+2. Imports should not be edited (any edits will be flushed out with the next update). However, refreshing imports is a potentially breaking change - and is discussed [elsewhere](UpdateImports.md).
+3. Changes should usually be small. Adding or changing 1 term is great. Adding or changing 10 related terms is ok. Adding or changing 100 or more terms at once should be considered very carefully.
+
+#### 4. _Check the Git diff_
+This step is very important. Rather than simply trusting your change had the intended effect, we should always use a git diff as a first pass for sanity checking.
+
+In our experience, having a visual git client like [GitHub Desktop](https://desktop.github.com/) or [sourcetree](https://www.sourcetreeapp.com/) is really helpful for this part. In case you prefer the command line:
+
+```
+git status
+git diff
+```
+#### 5. Quality control
+Now it's time to run your quality control checks. This can either happen locally ([5a](#5a-local-testing)) or through your continuous integration system ([7/5b](#75b-continuous-integration-testing)).
+
+#### 5a. Local testing
+If you chose to run your test locally:
+
+```
+sh run.sh make IMP=false test
+```
+This will run the whole set of configured ODK tests, including your change. If you have a complex DOSDP pattern pipeline you may want to add `PAT=false` to skip the potentially lengthy process of rebuilding the patterns.
+
+```
+sh run.sh make IMP=false PAT=false test
+```
+
+#### 6. Pull request
+
+When you are happy with the changes, you commit your changes to your feature branch, push them upstream (to GitHub) and create a pull request. For example:
+
+```
+git add NAMEOFCHANGEDFILES
+git commit -m "Added biological process term #12"
+git push -u origin issue23removeprocess
+```
+
+Then you go to your project on GitHub, and create a new pull request from the branch, for example: https://github.com/INCATools/ontology-development-kit/pulls
+
+There is a lot of great advice on how to write pull requests, but at the very least you should:
+- mention the tickets affected: `see #23` to link to a related ticket, or `fixes #23` if, by merging this pull request, the ticket is fixed. Tickets in the latter case will be closed automatically by GitHub when the pull request is merged.
+- summarise the changes in a few sentences. Consider the reviewer: what would they want to know right away.
+- If the diff is large, provide instructions on how to review the pull request best (sometimes, there are many changed files, but only one important change).
+
+#### 7/5b. Continuous Integration Testing
+If you didn't run any local quality control checks (see [5a](#5a-local-testing)), you should have Continuous Integration (CI) set up, for example:
+- Travis
+- GitHub Actions
+
+More on how to set this up [here](ContinuousIntegration.md). Once the pull request is created, the CI will automatically trigger. If all is fine, it will show up green, otherwise red.
+
+#### 8. Community review
+Once all the automatic tests have passed, it is important to put a second set of eyes on the pull request. Ontologies are inherently social - as in that they represent some kind of community consensus on how a domain is organised conceptually. This seems high brow talk, but it is very important that as an ontology editor, you have your work validated by the community you are trying to serve (e.g. your colleagues, other contributors etc.). In our experience, it is hard to get more than one review on a pull request - two is great. You can set up GitHub branch protection to actually require a review before a pull request can be merged! We recommend this.
+
+This step seems daunting to some hopefully under-resourced ontologies, but we recommend to put this high up on your list of priorities - train a colleague, reach out!
+
+#### 9. Merge and cleanup
+When the QC is green and the reviews are in (approvals), it is time to merge the pull request. After the pull request is merged, remember to delete the branch as well (this option will show up as a big button right after you have merged the pull request). If you have not done so, close all the associated tickets fixed by the pull request.
+
+#### 10. Changelog (Optional)
+It is sometimes difficult to keep track of changes made to an ontology. Some ontology teams opt to document changes in a changelog (simply a text file in your repository) so that when release day comes, you know everything you have changed. This is advisable at least for major changes (such as a new release system, a new pattern or template etc.).
diff --git a/docs/odk-workflows/ManageDocumentation.md b/docs/odk-workflows/ManageDocumentation.md
new file mode 100644
index 0000000..8cfd956
--- /dev/null
+++ b/docs/odk-workflows/ManageDocumentation.md
@@ -0,0 +1,46 @@
+# Updating the Documentation
+
+The documentation for VIVO is managed in two places (relative to the repository root):
+
+1. The `docs` directory contains all the files that pertain to the content of the documentation (more below)
+2. the `mkdocs.yaml` file contains the documentation config, in particular its navigation bar and theme.
+
+The documentation is hosted using GitHub pages, on a special branch of the repository (called `gh-pages`). It is important that this branch is never deleted - it contains all the files GitHub pages needs to render and deploy the site. It is also important to note that _the gh-pages branch should never be edited manually_. All changes to the docs happen inside the `docs` directory on the `main` branch.
+
+## Editing the docs
+
+### Changing content
+All the documentation is contained in the `docs` directory, and is managed in _Markdown_. Markdown is a very simple and convenient way to produce text documents with formatting instructions, and is very easy to learn - it is also used, for example, in GitHub issues. This is a normal editing workflow:
+
+1. Open the `.md` file you want to change in an editor of choice (a simple text editor is often best). _IMPORTANT_: Do not edit any files in the `docs/odk-workflows/` directory. These files are managed by the ODK system and will be overwritten when the repository is upgraded! If you wish to change these files, make an issue on the [ODK issue tracker](https://github.com/INCATools/ontology-development-kit/issues).
+2. Perform the edit and save the file
+3. Commit the file to a branch, and create a pull request as usual.
+4. If your development team likes your changes, merge the docs into main branch.
+5. Deploy the documentation (see below)
+
+## Deploy the documentation
+
+The documentation is _not_ automatically updated from the Markdown, and needs to be deployed deliberately. To do this, perform the following steps:
+
+1. In your terminal, navigate to the edit directory of your ontology, e.g.:
+ ```
+ cd vivo/src/ontology
+ ```
+2. Now you are ready to build the docs as follows:
+ ```
+ sh run.sh make update_docs
+ ```
+ [Mkdocs](https://www.mkdocs.org/) now sets off to build the site from the markdown pages. You will be asked to
+ - Enter your username
+ - Enter your password (see [here](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) for using GitHub access tokens instead)
+ _IMPORTANT_: Using password based authentication will be deprecated this year (2021). Make sure you read up on [personal access tokens](https://docs.github.com/en/github/authenticating-to-github/creating-a-personal-access-token) if that happens!
+
+ If everything was successful, you will see a message similar to this one:
+
+ ```
+ INFO - Your documentation should shortly be available at: https://StroemPhi.github.io/vivo/
+ ```
+3. Just to double check, you can now navigate to your documentation pages (usually https://StroemPhi.github.io/vivo/).
+ Just make sure you give GitHub 2-5 minutes to build the pages!
+
+
diff --git a/docs/odk-workflows/ReleaseWorkflow.md b/docs/odk-workflows/ReleaseWorkflow.md
new file mode 100644
index 0000000..19820ec
--- /dev/null
+++ b/docs/odk-workflows/ReleaseWorkflow.md
@@ -0,0 +1,69 @@
+# The release workflow
+The release workflow recommended by the ODK is based on GitHub releases and works as follows:
+
+1. Run a release with the ODK
+2. Review the release
+3. Merge to main branch
+4. Create a GitHub release
+
+These steps are outlined in detail in the following.
+
+## Run a release with the ODK
+
+Preparation:
+
+1. Ensure that all your pull requests are merged into your main (master) branch
+2. Make sure that all changes to main are committed to GitHub (`git status` should say that there are no modified files)
+3. Locally make sure you have the latest changes from main (`git pull`)
+4. Checkout a new branch (e.g. `git checkout -b release-2021-01-01`)
+5. You may or may not want to refresh your imports as part of your release strategy (see [here](UpdateImports.md))
+6. Make sure you have the latest ODK installed by running `docker pull obolibrary/odkfull`
+
+To actually run the release, you:
+
+1. Open a command line terminal window and navigate to the src/ontology directory (`cd vivo/src/ontology`)
+2. Run release pipeline:`sh run.sh make prepare_release -B`. Note that for some ontologies, this process can take up to 90 minutes - especially if there are large ontologies you depend on, like PRO or CHEBI.
+3. If everything went well, you should see the following output on your machine: `Release files are now in ../.. - now you should commit, push and make a release on your git hosting site such as GitHub or GitLab`.
+
+This will create all the specified release targets (OBO, OWL, JSON, and the variants, ont-full and ont-base) and copy them into your release directory (the top level of your repo).
+
+## Review the release
+
+1. (Optional) Rough check. This step is frequently skipped, but for the more paranoid among us (like the author of this doc), this is a 3 minute additional effort for some peace of mind. Open the main release (vivo.owl) in your favourite development environment (e.g. Protégé) and eyeball the hierarchy. We recommend two simple checks:
+ 1. Does the very top level of the hierarchy look ok? This means that all new terms have been imported/updated correctly.
+ 2. Does at least one change that you know should be in this release appear? For example, a new class. This means that the release was actually based on the recent edit file.
+2. Commit your changes to the branch and make a pull request
+3. In your GitHub pull request, review the following three files in detail (based on our experience):
+ 1. `vivo.obo` - this reflects a useful subset of the whole ontology (everything that can be covered by OBO format). OBO format has that speaking for it: it is very easy to review!
+ 2. `vivo-base.owl` - this reflects the asserted axioms in your ontology that you have actually edited.
+ 3. Ideally also take a look at `vivo-full.owl`, which may reveal interesting new inferences you did not know about. Note that the diff of this file is sometimes quite large.
+4. Like with every pull request, we recommend to always employ a second set of eyes when reviewing a PR!
+
+## Merge the main branch
+Once your [CI checks](ContinuousIntegration.md) have passed, and your reviews are completed, you can now merge the branch into your main branch (don't forget to delete the branch afterwards - a big button will appear after the merge is finished).
+
+## Create a GitHub release
+
+1. Go to your releases page on GitHub by navigating to your repository, and then clicking on releases (usually on the right, for example: https://github.com/StroemPhi/vivo/releases). Then click "Draft new release"
+1. As the tag version you **need to choose the date on which your ontologies were built.** You can find this, for example, by looking at the `vivo.obo` file and check the `data-version:` property. The date needs to be prefixed with a `v`, so, for example `v2020-02-06`.
+1. You can write whatever you want in the release title, but we typically write the date again. The description underneath should contain a concise list of changes or term additions.
+1. Click "Publish release". Done.
+
+## Debugging typical ontology release problems
+
+### Problems with memory
+
+When you are dealing with large ontologies, you need a lot of memory. When you see error messages relating to large ontologies such as CHEBI, PRO, NCBITAXON, or Uberon, you should think of memory first, see [here](https://github.com/INCATools/ontology-development-kit/blob/master/docs/DealWithLargeOntologies.md).
+
+### Problems when using OBO format based tools
+
+Sometimes you will get cryptic error messages when using legacy tools using OBO format, such as the ontology release tool (OORT), which is also available as part of the ODK docker container. In these cases, you need to track down what axiom or annotation actually caused the breakdown. In our experience (in about 60% of the cases) the problem lies with duplicate annotations (`def`, `comment`) which are illegal in OBO. Here is an example recipe of how to deal with such a problem:
+
+1. If you get a message like `make: *** [cl.Makefile:84: oort] Error 255` you might have a OORT error.
+2. To debug this, in your terminal enter `sh run.sh make IMP=false PAT=false oort -B` (assuming you are already in the ontology folder in your directory)
+3. This should show you where the error is in the log (e.g. multiple different definitions)
+WARNING: THE FIX BELOW IS NOT IDEAL, YOU SHOULD ALWAYS TRY TO FIX UPSTREAM IF POSSIBLE
+4. Open `vivo-edit.owl` in Protégé, find the offending term and delete all offending content (e.g. delete ALL definitions, if the problem was "multiple def tags not allowed") and save.
+*While this is not ideal, as it will remove all definitions from that term, they will be added back again when the term is fixed in the ontology it was imported from and added back in.
+5. Rerun `sh run.sh make IMP=false PAT=false oort -B` and if it all passes, commit your changes to a branch and make a pull request as usual.
+
diff --git a/docs/odk-workflows/RepoManagement.md b/docs/odk-workflows/RepoManagement.md
new file mode 100644
index 0000000..8d61469
--- /dev/null
+++ b/docs/odk-workflows/RepoManagement.md
@@ -0,0 +1,207 @@
+# Managing your ODK repository
+
+## Updating your ODK repository
+
+Your ODK repositories configuration is managed in `src/ontology/vivo-odk.yaml`. Once you have made your changes, you can run the following to apply your changes to the repository:
+
+
+```
+sh run.sh make update_repo
+```
+
+There are a large number of options that can be set to configure your ODK, but we will only discuss a few of them here.
+
+NOTE for Windows users:
+
+You may get a cryptic failure such as `Set Illegal Option -` if the update script located in `src/scripts/update_repo.sh`
+was saved using Windows Line endings. These need to change to unix line endings. In Notepad++, for example, you can
+click on Edit->EOL Conversion->Unix LF to change this.
+
+## Managing imports
+
+You can use the update repository workflow described on this page to perform the following operations to your imports:
+
+1. Add a new import
+2. Modify an existing import
+3. Remove an import you no longer want
+4. Customise an import
+
+We will discuss all these workflows in the following.
+
+
+### Add new import
+
+To add a new import, you first edit your odk config as described [above](#updating-your-odk-repository), adding an `id` to the `product` list in the `import_group` section (for the sake of this example, we assume you already import RO, and your goal is to also import GO):
+
+```
+import_group:
+ products:
+ - id: ro
+ - id: go
+```
+
+Note: our ODK file should only have one `import_group` which can contain multiple imports (in the `products` section). Next, you run the [update repo workflow](#updating-your-odk-repository) to apply these changes. Note that by default, this module is going to be a SLME Bottom module, see [here](http://robot.obolibrary.org/extract). To change that or customise your module, see section "Customise an import". To finalise the addition of your import, perform the following steps:
+
+1. Add an import statement to your `src/ontology/vivo-edit.owl` file. We suggest to do this using a text editor, by simply copying an existing import declaration and renaming it to the new ontology import, for example as follows:
+ ```
+ ...
+ Ontology(
+ Import(<http://purl.obolibrary.org/obo/vivo/imports/ro_import.owl>)
+ Import(<http://purl.obolibrary.org/obo/vivo/imports/go_import.owl>)
+ ...
+ ```
+2. Add your imports redirect to your catalog file `src/ontology/catalog-v001.xml`, for example:
+ ```
+ <uri name="http://purl.obolibrary.org/obo/vivo/imports/go_import.owl" uri="imports/go_import.owl"/>
+ ```
+3. Test whether everything is in order:
+ 1. [Refresh your import](UpdateImports.md)
+ 2. Open in your Ontology Editor of choice (Protege) and ensure that the expected terms are imported.
+
+Note: The catalog file `src/ontology/catalog-v001.xml` has one purpose: redirecting
+imports from URLs to local files. For example, if you have
+
+```
+Import(<http://purl.obolibrary.org/obo/vivo/imports/go_import.owl>)
+```
+
+in your editors file (the ontology) and
+
+```
+<uri name="http://purl.obolibrary.org/obo/vivo/imports/go_import.owl" uri="imports/go_import.owl"/>
+```
+
+in your catalog, tools like `robot` or Protégé will recognize the statement
+in the catalog file to redirect the URL `http://purl.obolibrary.org/obo/vivo/imports/go_import.owl`
+to the local file `imports/go_import.owl` (which is in your `src/ontology` directory).
+
+### Modify an existing import
+
+If you simply wish to refresh your import in light of new terms, see [here](UpdateImports.md). If you wish to change the type of your module see section "Customise an import".
+
+### Remove an existing import
+
+To remove an existing import, perform the following steps:
+
+1. remove the import declaration from your `src/ontology/vivo-edit.owl`.
+2. remove the id from your `src/ontology/vivo-odk.yaml`, eg. `- id: go` from the list of `products` in the `import_group`.
+3. run [update repo workflow](#updating-your-odk-repository)
+4. delete the associated files manually:
+ - `src/ontology/imports/go_import.owl`
+ - `src/ontology/imports/go_terms.txt`
+5. Remove the respective entry from the `src/ontology/catalog-v001.xml` file.
+
+### Customise an import
+
+By default, an import module extracted from a source ontology will be a SLME module, see [here](http://robot.obolibrary.org/extract). There are various options to change the default.
+
+The following change to your repo config (`src/ontology/vivo-odk.yaml`) will switch the go import from an SLME module to a simple ROBOT filter module:
+
+```
+import_group:
+ products:
+ - id: ro
+ - id: go
+ module_type: filter
+```
+
+A ROBOT filter module is, essentially, importing all external terms declared by your ontology (see [here](UpdateImports.md) on how to declare external terms to be imported). Note that the `filter` module does
+not consider terms/annotations from namespaces other than the base-namespace of the ontology itself. For example, in the
+example of GO above, only annotations / axioms related to the GO base IRI (http://purl.obolibrary.org/obo/GO_) would be considered. This
+behaviour can be changed by adding additional base IRIs as follows:
+
+
+```
+import_group:
+ products:
+ - id: go
+ module_type: filter
+ base_iris:
+ - http://purl.obolibrary.org/obo/GO_
+ - http://purl.obolibrary.org/obo/CL_
+ - http://purl.obolibrary.org/obo/BFO
+```
+
+If you wish to customise your import entirely, you can specify your own ROBOT command to do so. To do that, add the following to your repo config (`src/ontology/vivo-odk.yaml`):
+
+```
+import_group:
+ products:
+ - id: ro
+ - id: go
+ module_type: custom
+```
+
+Now add a new goal in your custom Makefile (`src/ontology/vivo.Makefile`, _not_ `src/ontology/Makefile`).
+
+```
+imports/go_import.owl: mirror/go.owl imports/go_terms_combined.txt
+ if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
+ extract -T imports/go_terms_combined.txt --force true --individuals exclude --method BOT \
+ query --update ../sparql/inject-subset-declaration.ru --update ../sparql/postprocess-module.ru \
+ annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@; fi
+```
+
+Now feel free to change this goal to do whatever you wish it to do! It probably makes some sense (albeit not being a strict necessity) to leave most of the goal intact and replace only:
+
+```
+extract -T imports/go_terms_combined.txt --force true --individuals exclude --method BOT \
+```
+
+to another ROBOT pipeline.
+
+## Add a component
+
+A component is an import which _belongs_ to your ontology, e.g. is managed by
+you and your team.
+
+1. Open `src/ontology/vivo-odk.yaml`
+1. If you don't have it yet, add a new top-level section `components`
+1. Under the `components` section, add a new section called `products`.
+This is where all your components are specified
+1. Under the `products` section, add a new component, e.g. `- filename: mycomp.owl`
+
+_Example_
+
+```
+components:
+ products:
+ - filename: mycomp.owl
+```
+
+When running `sh run.sh make update_repo`, a new file `src/ontology/components/mycomp.owl` will
+be created which you can edit as you see fit. Typical ways to edit:
+
+1. Using a ROBOT template to generate the component (see below)
+1. Manually curating the component separately with Protégé or any other editor
+1. Providing a `components/mycomp.owl:` make target in `src/ontology/vivo.Makefile`
+and provide a custom command to generate the component
+ - `WARNING`: Note that the custom rule to generate the component _MUST NOT_ depend on any other ODK-generated file such as seed files and the like (see [issue](https://github.com/INCATools/ontology-development-kit/issues/637)).
+1. Providing an additional attribute for the component in `src/ontology/vivo-odk.yaml`, `source`,
+to specify that this component should simply be downloaded from somewhere on the web.
+
+### Adding a new component based on a ROBOT template
+
+Since ODK 1.3.2, it is possible to simply link a ROBOT template to a component without having to specify any of the import logic. In order to add a new component that is connected to one or more template files, follow these steps:
+
+1. Open `src/ontology/vivo-odk.yaml`.
+1. Make sure that `use_templates: TRUE` is set in the global project options. You should also make sure that `use_context: TRUE` is set in case you are using prefixes in your templates that are not known to `robot`, such as `OMOP:`, `CPONT:` and more. All non-standard prefixes you are using should be added to `config/context.json`.
+1. Add another component to the `products` section.
+1. To activate this component to be template-driven, simply say: `use_template: TRUE`. This will create an empty template for you in the templates directory, which will automatically be processed when recreating the component (e.g. `run.bat make recreate-mycomp`).
+1. If you want to use more than one template, use the `templates` field to add as many template names as you wish. ODK will look for them in the `src/templates` directory.
+1. Advanced: If you want to provide additional processing options, you can use the `template_options` field. This should be a string with option from [robot template](http://robot.obolibrary.org/template). One typical example for additional options you may want to provide is `--add-prefixes config/context.json` to ensure the prefix map of your context is provided to `robot`, see above.
+
+_Example_:
+
+```
+components:
+ products:
+ - filename: mycomp.owl
+ use_template: TRUE
+ template_options: --add-prefixes config/context.json
+ templates:
+ - template1.tsv
+ - template2.tsv
+```
+
+_Note_: if your mirror is particularly large and complex, read [this ODK recommendation](https://github.com/INCATools/ontology-development-kit/blob/master/docs/DealWithLargeOntologies.md).
diff --git a/docs/odk-workflows/RepositoryFileStructure.md b/docs/odk-workflows/RepositoryFileStructure.md
new file mode 100644
index 0000000..96ea421
--- /dev/null
+++ b/docs/odk-workflows/RepositoryFileStructure.md
@@ -0,0 +1,32 @@
+# Repository structure
+
+The main kinds of files in the repository:
+
+1. Release files
+2. Imports
+3. [Components](#components)
+
+## Release files
+Release files are the files that are considered part of the official ontology release and are to be used by the community. A detailed description of the release artefacts can be found [here](https://github.com/INCATools/ontology-development-kit/blob/master/docs/ReleaseArtefacts.md).
+
+## Imports
+Imports are subsets of external ontologies that contain terms and axioms you would like to re-use in your ontology. These are considered "external", like dependencies in software development, and are not included in your "base" product, which is the [release artefact](https://github.com/INCATools/ontology-development-kit/blob/master/docs/ReleaseArtefacts.md) which contains only those axioms that you personally maintain.
+
+These are the current imports in VIVO
+
+| Import | URL | Type |
+| ------ | --- | ---- |
+| bfo | http://purl.obolibrary.org/obo/bfo.owl | mirror |
+| ro | http://purl.obolibrary.org/obo/ro.owl | None |
+| omo | http://purl.obolibrary.org/obo/omo.owl | mirror |
+| iao | http://purl.obolibrary.org/obo/iao.owl | None |
+| obi | http://purl.obolibrary.org/obo/obi.owl | None |
+
+## Components
+Components, in contrast to imports, are considered full members of the ontology. This means that any axiom in a component is also included in the ontology base - which means it is considered _native_ to the ontology. While this sounds complicated, consider this: conceptually, no component should be part of more than one ontology. If that seems to be the case, we are most likely talking about an import. Components are often not needed for ontologies, but there are some use cases:
+
+1. There is an automated process that generates and re-generates a part of the ontology
+2. A part of the ontology is managed in ROBOT templates
+3. The expressivity of the component is higher than the format of the edit file. For example, people still choose to manage their ontology in OBO format (they should not) missing out on a lot of owl features. They may choose to manage logic that is beyond OBO in a specific OWL component.
+
+
diff --git a/docs/odk-workflows/SettingUpDockerForODK.md b/docs/odk-workflows/SettingUpDockerForODK.md
new file mode 100644
index 0000000..101fc22
--- /dev/null
+++ b/docs/odk-workflows/SettingUpDockerForODK.md
@@ -0,0 +1,13 @@
+# Setting up your Docker environment for ODK use
+
+One of the most frequent problems with running the ODK for the first time is failure because of lack of memory. This can look like a Java OutOfMemory exception,
+but more often than not it will appear as something like an `Error 137`. There are two places you need to consider to set your memory:
+
+1. Your src/ontology/run.sh (or run.bat) file. You can set the memory in there by adding
+`robot_java_args: '-Xmx8G'` to your src/ontology/vivo-odk.yaml file, see for example [here](https://github.com/INCATools/ontology-development-kit/blob/0e0aef2b26b8db05f5e78b7c38f807d04312d06a/configs/uberon-odk.yaml#L36).
+2. Set your docker memory. By default, it should be about 10-20% more than your `robot_java_args` variable. You can manage your memory settings
+by right-clicking on the docker whale in your system bar-->Preferences-->Resources-->Advanced, see picture below.
+
+![dockermemory](https://github.com/INCATools/ontology-development-kit/raw/master/docs/img/docker_memory.png)
+
+
diff --git a/docs/odk-workflows/UpdateImports.md b/docs/odk-workflows/UpdateImports.md
new file mode 100644
index 0000000..d0737af
--- /dev/null
+++ b/docs/odk-workflows/UpdateImports.md
@@ -0,0 +1,176 @@
+# Update Imports Workflow
+
+This page discusses how to update the contents of your imports, like adding or removing terms. If you are looking to customise imports, like changing the module type, see [here](RepoManagement.md).
+
+## Importing a new term
+
+Note: some ontologies now use a merged-import system to manage dynamic imports, for these please follow instructions in the section titled "Using the Base Module approach".
+
+Importing a new term is split into two sub-phases:
+
+1. Declaring the terms to be imported
+2. Refreshing imports dynamically
+
+### Declaring terms to be imported
+There are three ways to declare terms that are to be imported from an external ontology. Choose the appropriate one for your particular scenario (all three can be used in parallel if need be):
+
+1. Protégé-based declaration
+2. Using term files
+3. Using the custom import template
+
+#### Protégé-based declaration
+
+This workflow is to be avoided, but may be appropriate if the editor _does not have access to the ODK docker container_.
+This approach also applies to ontologies that use base module import approach.
+
+1. Open your ontology (edit file) in Protégé (5.5+).
+1. Select 'owl:Thing'
+1. Add a new class as usual.
+1. Paste the _full iri_ in the 'Name:' field, for example, http://purl.obolibrary.org/obo/CHEBI_50906.
+1. Click 'OK'
+
+
+
+Now you can use this term for example to construct logical definitions. The next time the imports are refreshed (see how to refresh [here](#refresh-imports)), the metadata (labels, definitions, etc.) for this term are imported from the respective external source ontology and become visible in your ontology.
+
+
+#### Using term files
+
+Every import has, by default, a term file associated with it, which can be found in the imports directory. For example, if you have a GO import in `src/ontology/imports/go_import.owl`, you will also have an associated term file `src/ontology/imports/go_terms.txt`. You can add terms in there simply as a list:
+
+```
+GO:0008150
+GO:0008151
+```
+
+Now you can run the [refresh imports workflow](#refresh-imports) and the two terms will be imported.
+
+#### Using the custom import template
+
+This workflow is appropriate if:
+
+1. You prefer to manage all your imported terms in a single file (rather than multiple files like in the "Using term files" workflow above).
+2. You wish to augment your imported ontologies with additional information. This requires a cautionary discussion.
+
+To enable this workflow, you add the following to your ODK config file (`src/ontology/vivo-odk.yaml`), and [update the repository](RepoManagement.md):
+
+```
+use_custom_import_module: TRUE
+```
+
+Now you can manage your imported terms directly in the custom external terms template, which is located at `src/templates/external_import.owl`. Note that this file is a [ROBOT template](http://robot.obolibrary.org/template), and can, in principle, be extended to include any axioms you like. Before extending the template, however, read the following carefully.
+
+The main purpose of the custom import template is to enable the management of all terms to be imported in a centralised place. To enable that, you do not have to do anything other than maintaining the template. So if you, say, currently import `APOLLO_SV:00000480`, and you wish to import `APOLLO_SV:00000532`, you simply add a row like this:
+
+```
+ID Entity Type
+ID TYPE
+APOLLO_SV:00000480 owl:Class
+APOLLO_SV:00000532 owl:Class
+```
+
+When the imports are refreshed [see imports refresh workflow](#refresh-imports), the term(s) will simply be imported from the configured ontologies.
+
+Now, if you wish to extend the Makefile (which is beyond these instructions) and add, say, synonyms to the imported terms, you can do that, but you need to (a) preserve the `ID` and `ENTITY` columns and (b) ensure that the ROBOT template is valid otherwise, [see here](http://robot.obolibrary.org/template).
+
+_WARNING_. Note that doing this is a _widespread antipattern_ (see related [issue](https://github.com/OBOFoundry/OBOFoundry.github.io/issues/1443)). You should not change the axioms of terms that do not belong into your ontology unless necessary - such changes should always be pushed into the ontology where they belong. However, since people are doing it, whether the OBO Foundry likes it or not, at least using the _custom imports module_ as described here localises the changes to a single simple template and ensures that none of the annotations added this way are merged into the [base file](https://github.com/INCATools/ontology-development-kit/blob/master/docs/ReleaseArtefacts.md#release-artefact-1-base-required).
+
+### Refresh imports
+
+If you want to refresh the import yourself (this may be necessary to pass the travis tests), and you have the ODK installed, you can do the following (using go as an example):
+
+First, you navigate in your terminal to the ontology directory (underneath src in your hpo root directory).
+```
+cd src/ontology
+```
+
+Then, you regenerate the import that will now include any new terms you have added. Note: You must have [docker installed](SettingUpDockerForODK.md).
+
+```
+sh run.sh make PAT=false imports/go_import.owl -B
+```
+
+Since ODK 1.2.27, it is also possible to simply run the following, which is the same as the above:
+
+```
+sh run.sh make refresh-go
+```
+
+Note that in case you changed the defaults, you need to add `IMP=true` and/or `MIR=true` to the command below:
+
+```
+sh run.sh make IMP=true MIR=true PAT=false imports/go_import.owl -B
+```
+
+If you wish to skip refreshing the mirror, i.e. skip downloading the latest version of the source ontology for your import (e.g. `go.owl` for your go import) you can set `MIR=false` instead, which will do the exact same thing as the above, but is easier to remember:
+
+```
+sh run.sh make IMP=true MIR=false PAT=false imports/go_import.owl -B
+```
+
+## Using the Base Module approach
+
+Since ODK 1.2.31, we support an entirely new approach to generate modules: Using base files.
+The idea is to only import axioms from ontologies that _actually belong to it_.
+A base file is a subset of the ontology that only contains those axioms that nominally
+belong there. In other words, the base file does not contain any axioms that belong
+to another ontology. An example would be this:
+
+Imagine this being the full Uberon ontology:
+
+```
+Axiom 1: BFO:123 SubClassOf BFO:124
+Axiom 2: UBERON:123 SubClassOf BFO:123
+Axiom 3: UBERON:124 SubClassOf UBERON:123
+```
+
+The base file is the set of all axioms that are about UBERON terms:
+
+```
+Axiom 2: UBERON:123 SubClassOf BFO:123
+Axiom 3: UBERON:124 SubClassOf UBERON:123
+```
+
+I.e.
+
+```
+Axiom 1: BFO:123 SubClassOf BFO:124
+```
+
+Gets removed.
+
+The base file pipeline is a bit more complex than the normal pipelines, because
+of the logical interactions between the imported ontologies. This is solved by first
+merging all mirrors into one huge file and then extracting one mega module from it.
+
+Example: Let's say we are importing terms from Uberon, GO and RO in our ontologies.
+When we use the base pipelines, we
+
+1) First obtain the base (usually by simply downloading it, but there is also an option now to create it with ROBOT)
+2) We merge all base files into one big pile
+3) Then we extract a single module `imports/merged_import.owl`
+
+The first implementation of this pipeline is PATO, see https://github.com/pato-ontology/pato/blob/master/src/ontology/pato-odk.yaml.
+
+To check if your ontology uses this method, check src/ontology/vivo-odk.yaml to see if `use_base_merging: TRUE` is declared under `import_group`
+
+If your ontology uses Base Module approach, please use the following steps:
+
+First, add the term to be imported to the term file associated with it (see above "Using term files" section if this is not clear to you)
+
+Next, you navigate in your terminal to the ontology directory (underneath src in your hpo root directory).
+```
+cd src/ontology
+```
+
+Then refresh imports by running
+
+```
+sh run.sh make imports/merged_import.owl
+```
+Note: if your mirrors are updated, you can run `sh run.sh make no-mirror-refresh-merged`
+
+This requires quite a bit of memory on your local machine, so if you encounter an error, it might be a lack of memory on your computer. A solution would be to create a ticket in an issue tracker requesting for the term to be imported, and one of the local devs should pick this up and run the import for you.
+
+Lastly, restart Protégé, and the term should be imported and ready to be used.
+
diff --git a/docs/odk-workflows/components.md b/docs/odk-workflows/components.md
new file mode 100644
index 0000000..564821b
--- /dev/null
+++ b/docs/odk-workflows/components.md
@@ -0,0 +1,48 @@
+
+# Adding components to an ODK repo
+
+For details on what components are, please see component section of [repository file structure document](../odk-workflows/RepositoryFileStructure.md).
+
+To add custom components to an ODK repo, please follow the following steps:
+
+1) Locate your odk yaml file and open it with your favourite text editor (src/ontology/vivo-odk.yaml)
+2) Search if there is already a component section to the yaml file, if not add it accordingly, adding the name of your component:
+
+```
+components:
+ products:
+ - filename: your-component-name.owl
+```
+
+3) Add the component to your catalog file (src/ontology/catalog-v001.xml)
+
+```
+<uri name="http://vivoweb.org/ontology/core/vivo/components/your-component-name.owl" uri="components/your-component-name.owl"/>
+```
+
+4) Add the component to the edit file (src/ontology/vivo-edit.obo)
+for .obo formats:
+
+```
+import: http://vivoweb.org/ontology/core/vivo/components/your-component-name.owl
+```
+
+for .owl formats:
+
+```
+Import(<http://vivoweb.org/ontology/core/vivo/components/your-component-name.owl>)
+```
+
+5) Refresh your repo by running `sh run.sh make update_repo` - this should create a new file in src/ontology/components.
+6) In your custom makefile (src/ontology/vivo.Makefile) add a goal for your custom make file. In this example, the goal is a ROBOT template.
+
+```
+$(COMPONENTSDIR)/your-component-name.owl: $(SRC) ../templates/your-component-template.tsv
+ $(ROBOT) template --template ../templates/your-component-template.tsv \
+ annotate --ontology-iri $(ONTBASE)/$@ --output $(COMPONENTSDIR)/your-component-name.owl
+```
+
+(If using a ROBOT template, do not forget to add your template tsv in src/templates/)
+
+7) Make the file by running `sh run.sh make components/your-component-name.owl`
+
diff --git a/docs/odk-workflows/index.md b/docs/odk-workflows/index.md
new file mode 100644
index 0000000..1ac5b89
--- /dev/null
+++ b/docs/odk-workflows/index.md
@@ -0,0 +1,10 @@
+# Default ODK Workflows
+
+- [Daily Editors Workflow](EditorsWorkflow.md)
+- [Release Workflow](ReleaseWorkflow.md)
+- [Manage your ODK Repository](RepoManagement.md)
+- [Setting up Docker for ODK](SettingUpDockerForODK.md)
+- [Imports management](UpdateImports.md)
+- [Managing the documentation](ManageDocumentation.md)
+- [Managing your Automated Testing](ManageAutomatedTest.md)
+
diff --git a/issue_template.md b/issue_template.md
new file mode 100644
index 0000000..b81ab4d
--- /dev/null
+++ b/issue_template.md
@@ -0,0 +1,21 @@
+For new term requests, please provide the following information:
+
+## Preferred term label
+
+(e.g., Asplenia)
+
+## Synonyms
+
+(e.g., Absent spleen)
+
+## Textual definition
+
+the definition should be understandable even for non-specialists. Include a PubMed ID to refer to any relevant article that provides additional information about the suggested term.
+
+## Suggested parent term
+
+Please look in the hierarchy in a browser such as [OLS](http://www.ebi.ac.uk/ols/ontologies/vivo)
+
+## Attribution
+
+If you would like a nanoattribution, please indicate your ORCID id
\ No newline at end of file
diff --git a/mkdocs.yaml b/mkdocs.yaml
new file mode 100644
index 0000000..5340071
--- /dev/null
+++ b/mkdocs.yaml
@@ -0,0 +1,40 @@
+site_name: The VIVO Ontology
+theme:
+ name: material
+ features:
+ - content.tabs.link
+plugins:
+ - search
+markdown_extensions:
+ - pymdownx.highlight:
+ - pymdownx.inlinehilite
+ - pymdownx.snippets
+ - pymdownx.superfences
+ - pymdownx.tabbed:
+ - pymdownx.critic
+ - pymdownx.caret
+ - pymdownx.keys
+ - pymdownx.mark
+ - pymdownx.tilde
+
+site_url: https://StroemPhi.github.io/vivo/
+repo_url: https://github.com/StroemPhi/vivo/
+
+nav:
+ - Getting started: index.md
+ - Cite: cite.md
+ - How-to guides:
+ - Standard ODK workflows:
+ - Overview: odk-workflows/index.md
+ - Editors Workflow: odk-workflows/EditorsWorkflow.md
+ - Release Workflow: odk-workflows/ReleaseWorkflow.md
+ - Manage your ODK Repository: odk-workflows/RepoManagement.md
+ - Setting up Docker for ODK: odk-workflows/SettingUpDockerForODK.md
+ - Imports management: odk-workflows/UpdateImports.md
+ - Components management: odk-workflows/components.md
+ - Managing the documentation: odk-workflows/ManageDocumentation.md
+ - Managing your automated testing: odk-workflows/ManageAutomatedTest.md
+ - Continuous Integration: odk-workflows/ContinuousIntegration.md
+ - Your ODK Repository Overview: odk-workflows/RepositoryFileStructure.md
+ - Contributing: contributing.md
+
diff --git a/src/metadata/README.md b/src/metadata/README.md
new file mode 100644
index 0000000..9281531
--- /dev/null
+++ b/src/metadata/README.md
@@ -0,0 +1,24 @@
+Metadata files for the OBO Library
+
+ * [vivo.yml](vivo.yml)
+ * Determines how your purl.obolibrary.org/obo/vivo/ redirects will be handled
+ * Go here: https://github.com/OBOFoundry/purl.obolibrary.org/tree/master/config
+ * Click [New File](https://github.com/OBOFoundry/purl.obolibrary.org/new/master/config)
+ * Paste in the contents of [vivo.yml](vivo.yml)
+ * Click "Commit new file"
+ * IMPORTANT: remember to make a pull request
+ * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library*
+ * [vivo.md](vivo.md)
+ * Determines how your metadata is shown on OBO Library, OLS and AberOWL
+ * Go here: https://github.com/OBOFoundry/OBOFoundry.github.io/tree/master/ontology
+ * Click [New File](https://github.com/OBOFoundry/OBOFoundry.github.io/new/master/ontology)
+ * Paste in the contents of [vivo.md](vivo.md)
+ * Click "Commit new file"
+ * IMPORTANT: remember to make a pull request
+ * An OBO admin will merge your Pull Request *providing it meets the requirements of the OBO library*
+
+For more background see:
+
+ * http://obofoundry.org/
+ * http://obofoundry.org/faq/how-do-i-edit-metadata.html
+
diff --git a/src/metadata/vivo.md b/src/metadata/vivo.md
new file mode 100644
index 0000000..81a7af5
--- /dev/null
+++ b/src/metadata/vivo.md
@@ -0,0 +1,48 @@
+---
+layout: ontology_detail
+id: vivo
+title: The VIVO Ontology
+jobs:
+ - id: https://travis-ci.org/StroemPhi/vivo
+ type: travis-ci
+build:
+ checkout: git clone https://github.com/StroemPhi/vivo.git
+ system: git
+ path: "."
+contact:
+ email:
+ label:
+ github:
+description: The VIVO Ontology is an ontology...
+domain: stuff
+homepage: https://github.com/StroemPhi/vivo
+products:
+ - id: vivo.owl
+ name: "The VIVO Ontology main release in OWL format"
+ - id: vivo.obo
+ name: "The VIVO Ontology additional release in OBO format"
+ - id: vivo.json
+ name: "The VIVO Ontology additional release in OBOJSon format"
+ - id: vivo/vivo-base.owl
+ name: "The VIVO Ontology main release in OWL format"
+ - id: vivo/vivo-base.obo
+ name: "The VIVO Ontology additional release in OBO format"
+ - id: vivo/vivo-base.json
+ name: "The VIVO Ontology additional release in OBOJSon format"
+dependencies:
+- id: bfo
+- id: ro
+- id: omo
+- id: iao
+- id: obi
+
+tracker: https://github.com/StroemPhi/vivo/issues
+license:
+ url: http://creativecommons.org/licenses/by/3.0/
+ label: CC-BY
+activity_status: active
+---
+
+Enter a detailed description of your ontology here. You can use arbitrary markdown and HTML.
+You can also embed images too.
+
diff --git a/src/metadata/vivo.yml b/src/metadata/vivo.yml
new file mode 100644
index 0000000..f67b6e4
--- /dev/null
+++ b/src/metadata/vivo.yml
@@ -0,0 +1,28 @@
+# PURL configuration for http://purl.obolibrary.org/obo/vivo
+
+idspace: VIVO
+base_url: /obo/vivo
+
+products:
+- vivo.owl: https://raw.githubusercontent.com/StroemPhi/vivo/main/vivo.owl
+- vivo.obo: https://raw.githubusercontent.com/StroemPhi/vivo/main/vivo.obo
+
+term_browser: ontobee
+example_terms:
+- VIVO_0000000
+
+entries:
+
+- prefix: /releases/
+ replacement: https://raw.githubusercontent.com/StroemPhi/vivo/v
+
+- prefix: /tracker/
+ replacement: https://github.com/StroemPhi/vivo/issues
+
+- prefix: /about/
+ replacement: http://www.ontobee.org/ontology/VIVO?iri=http://purl.obolibrary.org/obo/
+
+## generic fall-through, serve direct from github by default
+- prefix: /
+ replacement: https://raw.githubusercontent.com/StroemPhi/vivo/main/
+
diff --git a/src/ontology/Makefile b/src/ontology/Makefile
new file mode 100644
index 0000000..b2f6719
--- /dev/null
+++ b/src/ontology/Makefile
@@ -0,0 +1,625 @@
+# ----------------------------------------
+# Makefile for vivo
+# Generated using ontology-development-kit
+# ODK Version: v1.4.3
+# ----------------------------------------
+# IMPORTANT: DO NOT EDIT THIS FILE. To override default make goals, use vivo.Makefile instead
+
+
+# ----------------------------------------
+# More information: https://github.com/INCATools/ontology-development-kit/
+
+
+# ----------------------------------------
+# Standard Constants
+# ----------------------------------------
+# these can be overwritten on the command line
+
+OBOBASE= http://purl.obolibrary.org/obo
+URIBASE= http://vivoweb.org/ontology/core
+ONT= vivo
+ONTBASE= http://vivoweb.org/ontology/core/vivo
+EDIT_FORMAT= owl
+SRC = $(ONT)-edit.$(EDIT_FORMAT)
+MAKE_FAST= $(MAKE) IMP=false PAT=false COMP=false MIR=false
+CATALOG= catalog-v001.xml
+ROBOT= robot --catalog $(CATALOG)
+
+OWLTOOLS= owltools --use-catalog
+RELEASEDIR= ../..
+REPORTDIR= reports
+TEMPLATEDIR= ../templates
+TMPDIR= tmp
+MIRRORDIR= mirror
+IMPORTDIR= imports
+SUBSETDIR= subsets
+SCRIPTSDIR= ../scripts
+UPDATEREPODIR= target
+SPARQLDIR = ../sparql
+COMPONENTSDIR = components
+REPORT_FAIL_ON = None
+REPORT_LABEL = -l true
+REPORT_PROFILE_OPTS =
+OBO_FORMAT_OPTIONS =
+SPARQL_VALIDATION_CHECKS = owldef-self-reference iri-range label-with-iri multiple-replaced_by
+SPARQL_EXPORTS = basic-report class-count-by-prefix edges xrefs obsoletes synonyms
+ODK_VERSION_MAKEFILE = v1.4.3
+
+TODAY ?= $(shell date +%Y-%m-%d)
+OBODATE ?= $(shell date +'%d:%m:%Y %H:%M')
+VERSION= $(TODAY)
+ANNOTATE_ONTOLOGY_VERSION = annotate -V $(ONTBASE)/releases/$(VERSION)/$@ --annotation owl:versionInfo $(VERSION)
+ANNOTATE_CONVERT_FILE = annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) convert -f ofn --output $@.tmp.owl && mv $@.tmp.owl $@
+OTHER_SRC =
+ONTOLOGYTERMS = $(TMPDIR)/ontologyterms.txt
+EDIT_PREPROCESSED = $(TMPDIR)/$(ONT)-preprocess.owl
+
+FORMATS = $(sort owl ttl obo owl)
+FORMATS_INCL_TSV = $(sort $(FORMATS) tsv)
+RELEASE_ARTEFACTS = $(sort $(ONT)-full $(ONT)-base )
+
+# ----------------------------------------
+# Top-level targets
+# ----------------------------------------
+
+.PHONY: .FORCE
+
+.PHONY: all
+all: all_odk
+
+.PHONY: all_odk
+all_odk: odkversion test all_assets
+
+.PHONY: test
+test: odkversion reason_test sparql_test robot_reports $(REPORTDIR)/validate_profile_owl2dl_$(ONT).owl.txt
+ echo "Finished running all tests successfully."
+
+.PHONY: release_diff
+release_diff: $(REPORTDIR)/release-diff.md
+
+.PHONY: reason_test
+reason_test: $(EDIT_PREPROCESSED)
+ $(ROBOT) reason --input $< --reasoner ELK --equivalent-classes-allowed asserted-only \
+ --exclude-tautologies structural --output test.owl && rm test.owl
+
+.PHONY: odkversion
+odkversion:
+ echo "ODK Makefile version: $(ODK_VERSION_MAKEFILE) (this is the version of the ODK with which this Makefile was generated, \
+ not the version of the ODK you are running)" &&\
+ echo "ROBOT version (ODK): " && $(ROBOT) --version
+
+$(TMPDIR) $(REPORTDIR) $(MIRRORDIR) $(IMPORTDIR) $(COMPONENTSDIR) $(SUBSETDIR):
+ mkdir -p $@
+
+# ----------------------------------------
+# Release assets
+# ----------------------------------------
+
+MAIN_PRODUCTS = $(sort $(foreach r,$(RELEASE_ARTEFACTS), $(r)) $(ONT))
+MAIN_GZIPPED =
+MAIN_FILES = $(foreach n,$(MAIN_PRODUCTS), $(foreach f,$(FORMATS), $(n).$(f))) $(MAIN_GZIPPED)
+SRCMERGED = $(TMPDIR)/merged-$(SRC)
+
+.PHONY: all_main
+all_main: $(MAIN_FILES)
+
+# ----------------------------------------
+# Import assets
+# ----------------------------------------
+
+
+IMPORTS = bfo ro omo iao obi
+
+IMPORT_ROOTS = $(patsubst %, $(IMPORTDIR)/%_import, $(IMPORTS))
+IMPORT_OWL_FILES = $(foreach n,$(IMPORT_ROOTS), $(n).owl)
+IMPORT_FILES = $(IMPORT_OWL_FILES)
+
+
+.PHONY: all_imports
+all_imports: $(IMPORT_FILES)
+
+# ----------------------------------------
+# Subset assets
+# ----------------------------------------
+
+
+SUBSETS =
+
+SUBSET_ROOTS = $(patsubst %, $(SUBSETDIR)/%, $(SUBSETS))
+SUBSET_FILES = $(foreach n,$(SUBSET_ROOTS), $(foreach f,$(FORMATS_INCL_TSV), $(n).$(f)))
+
+.PHONY: all_subsets
+all_subsets: $(SUBSET_FILES)
+
+# ----------------------------------------
+# Mapping assets
+# ----------------------------------------
+
+
+MAPPINGS =
+
+MAPPING_FILES = $(patsubst %, $(MAPPINGDIR)/%.sssom.tsv, $(MAPPINGS))
+
+.PHONY: all_mappings
+all_mappings: $(MAPPING_FILES)
+
+
+# ----------------------------------------
+# QC Reports & Utilities
+# ----------------------------------------
+
+OBO_REPORT = $(SRC)-obo-report
+REPORTS = $(OBO_REPORT)
+REPORT_FILES = $(patsubst %, $(REPORTDIR)/%.tsv, $(REPORTS))
+
+.PHONY: robot_reports
+robot_reports: $(REPORT_FILES)
+
+.PHONY: all_reports
+all_reports: custom_reports robot_reports
+
+# ----------------------------------------
+# ROBOT OWL Profile checking
+# ----------------------------------------
+
+# The merge step is necessary to avoid undeclared entity violations.
+$(REPORTDIR)/validate_profile_owl2dl_%.txt: % | $(REPORTDIR) $(TMPDIR)
+ $(ROBOT) merge -i $< convert -f ofn -o $(TMPDIR)/validate.ofn
+ $(ROBOT) validate-profile --profile DL -i $(TMPDIR)/validate.ofn -o $@ || { cat $@ && exit 1; }
+.PRECIOUS: $(REPORTDIR)/validate_profile_owl2dl_%.txt
+
+validate_profile_%: $(REPORTDIR)/validate_profile_owl2dl_%.txt
+ echo "$* profile validation completed."
+
+# ----------------------------------------
+# Sparql queries: Q/C
+# ----------------------------------------
+
+# these live in the ../sparql directory, and have suffix -violation.sparql
+# adding the name here will make the violation check live.
+
+SPARQL_VALIDATION_QUERIES = $(foreach V,$(SPARQL_VALIDATION_CHECKS),$(SPARQLDIR)/$(V)-violation.sparql)
+
+sparql_test: $(EDIT_PREPROCESSED) catalog-v001.xml | $(REPORTDIR)
+ifneq ($(SPARQL_VALIDATION_QUERIES),)
+
+ $(ROBOT) verify --catalog catalog-v001.xml -i $(EDIT_PREPROCESSED) --queries $(SPARQL_VALIDATION_QUERIES) -O $(REPORTDIR)
+endif
+
+# ----------------------------------------
+# ROBOT report
+# ----------------------------------------
+
+$(REPORTDIR)/$(SRC)-obo-report.tsv: $(SRCMERGED) | $(REPORTDIR)
+ $(ROBOT) report -i $< $(REPORT_LABEL) $(REPORT_PROFILE_OPTS) --fail-on $(REPORT_FAIL_ON) --base-iri $(URIBASE)/VIVO_ --base-iri $(URIBASE)/vivo --print 5 -o $@
+
+$(REPORTDIR)/%-obo-report.tsv: % | $(REPORTDIR)
+ $(ROBOT) report -i $< $(REPORT_LABEL) $(REPORT_PROFILE_OPTS) --fail-on $(REPORT_FAIL_ON) --base-iri $(URIBASE)/VIVO_ --base-iri $(URIBASE)/vivo --print 5 -o $@
+
+# ----------------------------------------
+# Release assets
+# ----------------------------------------
+
+ASSETS = \
+ $(IMPORT_FILES) \
+ $(MAIN_FILES) \
+ $(REPORT_FILES) \
+ $(SUBSET_FILES) \
+ $(MAPPING_FILES)
+
+RELEASE_ASSETS = \
+ $(MAIN_FILES) \
+ $(SUBSET_FILES)
+
+.PHONY: all_assets
+all_assets: $(ASSETS)
+
+.PHONY: show_assets
+show_assets:
+ echo $(ASSETS)
+ du -sh $(ASSETS)
+
+check_rdfxml_%: %
+ @check-rdfxml $<
+
+.PHONY: check_rdfxml_assets
+check_rdfxml_assets: $(foreach product,$(MAIN_PRODUCTS),check_rdfxml_$(product).owl)
+
+# ----------------------------------------
+# Release Management
+# ----------------------------------------
+
+CLEANFILES=$(MAIN_FILES) $(SRCMERGED) $(EDIT_PREPROCESSED)
+# This should be executed by the release manager whenever time comes to make a release.
+# It will ensure that all assets/files are fresh, and will copy to release folder
+
+.PHONY: prepare_release
+prepare_release: all_odk
+ rsync -R $(RELEASE_ASSETS) $(RELEASEDIR) &&\
+ rm -f $(CLEANFILES) &&\
+ echo "Release files are now in $(RELEASEDIR) - now you should commit, push and make a release \
+ on your git hosting site such as GitHub or GitLab"
+
+.PHONY: prepare_initial_release
+prepare_initial_release: all_assets
+ rsync -R $(RELEASE_ASSETS) $(RELEASEDIR) &&\
+ rm -f $(patsubst %, ./%, $(CLEANFILES)) &&\
+ cd $(RELEASEDIR) && git add $(RELEASE_ASSETS)
+
+.PHONY: prepare_release_fast
+prepare_release_fast:
+ $(MAKE) prepare_release IMP=false PAT=false MIR=false COMP=false
+
+CURRENT_RELEASE=$(ONTBASE).owl
+
+$(TMPDIR)/current-release.owl:
+ wget $(CURRENT_RELEASE) -O $@
+
+$(REPORTDIR)/release-diff.md: $(ONT).owl $(TMPDIR)/current-release.owl
+ $(ROBOT) diff --labels true --left $(TMPDIR)/current-release.owl --right $(ONT).owl -f markdown -o $@
+
+# ------------------------
+# Imports: Seeding system
+# ------------------------
+
+# seed.txt contains all referenced entities
+IMPORTSEED=$(TMPDIR)/seed.txt
+PRESEED=$(TMPDIR)/pre_seed.txt
+
+$(SRCMERGED): $(EDIT_PREPROCESSED) $(OTHER_SRC)
+ $(ROBOT) remove --input $< --select imports --trim false \
+ merge $(patsubst %, -i %, $(OTHER_SRC)) -o $@
+
+$(EDIT_PREPROCESSED): $(SRC)
+ $(ROBOT) convert --input $< --format ofn --output $@
+
+$(PRESEED): $(SRCMERGED)
+ $(ROBOT) query -f csv -i $< --query ../sparql/terms.sparql $@.tmp &&\
+ cat $@.tmp | sort | uniq > $@
+
+
+
+ALLSEED = $(PRESEED) \
+
+
+$(IMPORTSEED): $(ALLSEED) | $(TMPDIR)
+ if [ $(IMP) = true ]; then cat $(ALLSEED) | sort | uniq > $@; fi
+
+ANNOTATION_PROPERTIES=rdfs:label IAO:0000115
+
+# ----------------------------------------
+# Import modules
+# ----------------------------------------
+# Most ontologies are modularly constructed using portions of other ontologies
+# These live in the imports/ folder
+# This pattern uses ROBOT to generate an import module
+
+# Should be able to drop this if robot can just take a big messy list of terms as input.
+$(IMPORTDIR)/%_terms_combined.txt: $(IMPORTSEED) $(IMPORTDIR)/%_terms.txt
+ if [ $(IMP) = true ]; then cat $^ | grep -v ^# | sort | uniq > $@; fi
+
+
+
+
+$(IMPORTDIR)/%_import.owl: $(MIRRORDIR)/%.owl $(IMPORTDIR)/%_terms_combined.txt
+ if [ $(IMP) = true ]; then $(ROBOT) query -i $< --update ../sparql/preprocess-module.ru \
+ extract -T $(IMPORTDIR)/$*_terms_combined.txt --force true --copy-ontology-annotations true --individuals include --method BOT \
+ query --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
+ $(ANNOTATE_CONVERT_FILE); fi
+
+.PRECIOUS: $(IMPORTDIR)/%_import.owl
+
+## Module for ontology: bfo
+
+$(IMPORTDIR)/bfo_import.owl: $(MIRRORDIR)/bfo.owl $(IMPORTDIR)/bfo_terms_combined.txt
+ if [ $(IMP) = true ]; then $(ROBOT) merge -i $< query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
+ $(ANNOTATE_CONVERT_FILE); fi
+
+## Module for ontology: omo
+
+$(IMPORTDIR)/omo_import.owl: $(MIRRORDIR)/omo.owl $(IMPORTDIR)/omo_terms_combined.txt
+ if [ $(IMP) = true ]; then $(ROBOT) merge -i $< query --update ../sparql/preprocess-module.ru --update ../sparql/inject-subset-declaration.ru --update ../sparql/inject-synonymtype-declaration.ru --update ../sparql/postprocess-module.ru \
+ $(ANNOTATE_CONVERT_FILE); fi
+
+
+.PHONY: refresh-imports
+refresh-imports:
+ $(MAKE) IMP=true MIR=true PAT=false IMP_LARGE=true all_imports -B
+
+.PHONY: no-mirror-refresh-imports
+no-mirror-refresh-imports:
+ $(MAKE) IMP=true MIR=false PAT=false IMP_LARGE=true all_imports -B
+
+.PHONY: refresh-imports-excluding-large
+refresh-imports-excluding-large:
+ $(MAKE) IMP=true MIR=true PAT=false IMP_LARGE=false all_imports -B
+
+.PHONY: refresh-%
+refresh-%:
+ $(MAKE) IMP=true IMP_LARGE=true MIR=true PAT=false $(IMPORTDIR)/$*_import.owl -B
+
+.PHONY: no-mirror-refresh-%
+no-mirror-refresh-%:
+ $(MAKE) IMP=true IMP_LARGE=true MIR=false PAT=false $(IMPORTDIR)/$*_import.owl -B
+
+# ----------------------------------------
+# Mirroring upstream ontologies
+# ----------------------------------------
+
+IMP=true # Global parameter to bypass import generation
+MIR=true # Global parameter to bypass mirror generation
+IMP_LARGE=true # Global parameter to bypass handling of large imports
+
+
+
+## ONTOLOGY: bfo
+.PHONY: mirror-bfo
+.PRECIOUS: $(MIRRORDIR)/bfo.owl
+mirror-bfo: | $(TMPDIR)
+ if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/bfo.owl --create-dirs -o $(MIRRORDIR)/bfo.owl --retry 4 --max-time 200 &&\
+ $(ROBOT) convert -i $(MIRRORDIR)/bfo.owl -o $@.tmp.owl &&\
+ mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
+
+
+## ONTOLOGY: ro
+.PHONY: mirror-ro
+.PRECIOUS: $(MIRRORDIR)/ro.owl
+mirror-ro: | $(TMPDIR)
+ if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/ro.owl --create-dirs -o $(MIRRORDIR)/ro.owl --retry 4 --max-time 200 &&\
+ $(ROBOT) convert -i $(MIRRORDIR)/ro.owl -o $@.tmp.owl &&\
+ mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
+
+
+## ONTOLOGY: omo
+.PHONY: mirror-omo
+.PRECIOUS: $(MIRRORDIR)/omo.owl
+mirror-omo: | $(TMPDIR)
+ if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/omo.owl --create-dirs -o $(MIRRORDIR)/omo.owl --retry 4 --max-time 200 &&\
+ $(ROBOT) convert -i $(MIRRORDIR)/omo.owl -o $@.tmp.owl &&\
+ mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
+
+
+## ONTOLOGY: iao
+.PHONY: mirror-iao
+.PRECIOUS: $(MIRRORDIR)/iao.owl
+mirror-iao: | $(TMPDIR)
+ if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/iao.owl --create-dirs -o $(MIRRORDIR)/iao.owl --retry 4 --max-time 200 &&\
+ $(ROBOT) convert -i $(MIRRORDIR)/iao.owl -o $@.tmp.owl &&\
+ mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
+
+
+## ONTOLOGY: obi
+.PHONY: mirror-obi
+.PRECIOUS: $(MIRRORDIR)/obi.owl
+mirror-obi: | $(TMPDIR)
+ if [ $(MIR) = true ] && [ $(IMP) = true ]; then curl -L $(OBOBASE)/obi.owl --create-dirs -o $(MIRRORDIR)/obi.owl --retry 4 --max-time 200 &&\
+ $(ROBOT) convert -i $(MIRRORDIR)/obi.owl -o $@.tmp.owl &&\
+ mv $@.tmp.owl $(TMPDIR)/$@.owl; fi
+
+
+$(MIRRORDIR)/%.owl: mirror-% | $(MIRRORDIR)
+ if [ $(IMP) = true ] && [ $(MIR) = true ] && [ -f $(TMPDIR)/mirror-$*.owl ]; then if cmp -s $(TMPDIR)/mirror-$*.owl $@ ; then echo "Mirror identical, ignoring."; else echo "Mirrors different, updating." &&\
+ cp $(TMPDIR)/mirror-$*.owl $@; fi; fi
+
+
+
+
+# ----------------------------------------
+# Subsets
+# ----------------------------------------
+$(SUBSETDIR)/%.tsv: $(SUBSETDIR)/%.owl
+ $(ROBOT) query -f tsv -i $< -s ../sparql/labels.sparql $@
+.PRECIOUS: $(SUBSETDIR)/%.tsv
+
+$(SUBSETDIR)/%.owl: $(ONT).owl | $(SUBSETDIR)
+ $(OWLTOOLS) $< --extract-ontology-subset --fill-gaps --subset $* -o $@.tmp.owl && mv $@.tmp.owl $@ &&\
+ $(ROBOT) annotate --input $@ --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) -o $@.tmp.owl && mv $@.tmp.owl $@
+.PRECIOUS: $(SUBSETDIR)/%.owl
+
+
+$(SUBSETDIR)/%.obo: $(SUBSETDIR)/%.owl
+ $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
+
+$(SUBSETDIR)/%.ttl: $(SUBSETDIR)/%.owl
+ $(ROBOT) convert --input $< --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@
+
+
+# ---------------------------------------------
+# Sparql queries: Table exports / Query Reports
+# ---------------------------------------------
+
+SPARQL_EXPORTS_ARGS = $(foreach V,$(SPARQL_EXPORTS),-s $(SPARQLDIR)/$(V).sparql $(REPORTDIR)/$(V).tsv)
+# This combines all into one single command
+
+.PHONY: custom_reports
+custom_reports: $(EDIT_PREPROCESSED) | $(REPORTDIR)
+ifneq ($(SPARQL_EXPORTS_ARGS),)
+ $(ROBOT) query -f tsv --use-graphs true -i $< $(SPARQL_EXPORTS_ARGS)
+endif
+
+# ----------------------------------------
+# Release artefacts: export formats
+# ----------------------------------------
+
+
+$(ONT)-full.obo: $(ONT)-full.owl
+ $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
+$(ONT)-full.ttl: $(ONT)-full.owl
+ $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
+ convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@
+$(ONT)-base.obo: $(ONT)-base.owl
+ $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
+$(ONT)-base.ttl: $(ONT)-base.owl
+ $(ROBOT) annotate --input $< --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
+ convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@
+# ----------------------------------------
+# Release artefacts: main release artefacts
+# ----------------------------------------
+
+$(ONT).owl: $(ONT)-full.owl
+ $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
+ convert -o $@.tmp.owl && mv $@.tmp.owl $@
+
+$(ONT).obo: $(ONT).owl
+ $(ROBOT) convert --input $< --check false -f obo $(OBO_FORMAT_OPTIONS) -o $@.tmp.obo && grep -v ^owl-axioms $@.tmp.obo > $@ && rm $@.tmp.obo
+$(ONT).ttl: $(ONT).owl
+ $(ROBOT) annotate --input $< --ontology-iri $(URIBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
+ convert --check false -f ttl -o $@.tmp.ttl && mv $@.tmp.ttl $@
+# -----------------------------------------------------
+# Release artefacts: variants (base, full, simple, etc)
+# -----------------------------------------------------
+SHARED_ROBOT_COMMANDS =
+
+$(ONTOLOGYTERMS): $(SRCMERGED)
+ $(ROBOT) query -f csv -i $< --query ../sparql/vivo_terms.sparql $@
+
+# ROBOT pipeline that merges imports, including components.
+ROBOT_RELEASE_IMPORT_MODE=$(ROBOT) merge --input $<
+
+# ROBOT pipeline that removes imports, then merges components. This is for release artefacts that start from "base"
+ROBOT_RELEASE_IMPORT_MODE_BASE=$(ROBOT) remove --input $< --select imports --trim false merge $(patsubst %, -i %, $(OTHER_SRC))
+
+# base: All the axioms as they are edited by the editors, excluding reasoning
+$(ONT)-base.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC)
+ $(ROBOT_RELEASE_IMPORT_MODE_BASE) \
+ $(SHARED_ROBOT_COMMANDS) \
+ annotate --link-annotation http://purl.org/dc/elements/1.1/type http://purl.obolibrary.org/obo/IAO_8000001 \
+ --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) \
+ --output $@.tmp.owl && mv $@.tmp.owl $@
+# Full: The full artefacts with imports merged, reasoned.
+$(ONT)-full.owl: $(EDIT_PREPROCESSED) $(OTHER_SRC) $(IMPORT_FILES)
+ $(ROBOT_RELEASE_IMPORT_MODE) \
+ reason --reasoner ELK --equivalent-classes-allowed asserted-only --exclude-tautologies structural \
+ relax \
+ reduce -r ELK \
+ $(SHARED_ROBOT_COMMANDS) annotate --ontology-iri $(ONTBASE)/$@ $(ANNOTATE_ONTOLOGY_VERSION) --output $@.tmp.owl && mv $@.tmp.owl $@
+# ----------------------------------------
+# Debugging Tools
+# ----------------------------------------
+
+explain_unsat: $(EDIT_PREPROCESSED)
+ $(ROBOT) explain -i $< -M unsatisfiability --unsatisfiable random:10 --explanation $(TMPDIR)/$@.md
+
+
+
+RELEASE_ASSETS_AFTER_RELEASE=$(foreach n,$(RELEASE_ASSETS), ../../$(n))
+GHVERSION=v$(VERSION)
+
+.PHONY: public_release
+public_release:
+ @test $(GHVERSION)
+ ls -alt $(RELEASE_ASSETS_AFTER_RELEASE)
+ gh release create $(GHVERSION) --title "$(VERSION) Release" --draft $(RELEASE_ASSETS_AFTER_RELEASE) --generate-notes
+
+# ----------------------------------------
+# General Validation
+# ----------------------------------------
+TSV=
+ALL_TSV_FILES=
+
+validate-tsv: $(TSV) | $(TMPDIR)
+ for FILE in $< ; do \
+ tsvalid $$FILE > $(TMPDIR)/validate.txt; \
+ if [ -s $(TMPDIR)/validate.txt ]; then cat $(TMPDIR)/validate.txt && exit 1; fi ; \
+ done
+
+validate-all-tsv: $(ALL_TSV_FILES)
+ $(MAKE) validate-tsv TSV="$^"
+
+# ----------------------------------------
+# Editors Utilities
+# ----------------------------------------
+
+
+
+.PHONY: normalize_src
+normalize_src: $(SRC)
+ $(ROBOT) convert -i $< -f ofn -o $(TMPDIR)/normalise && mv $(TMPDIR)/normalise $<
+
+.PHONY: validate_idranges
+validate_idranges:
+ amm $(SCRIPTSDIR)/validate_id_ranges.sc vivo-idranges.owl
+
+.PHONY: update_repo
+update_repo:
+ sh $(SCRIPTSDIR)/update_repo.sh
+
+
+update_docs:
+ mkdocs gh-deploy --config-file ../../mkdocs.yaml
+
+# Note to future generations: prepending ./ is a safety measure to ensure that
+# the environment does not maliciously set `CLEANFILES` to `\`.
+.PHONY: clean
+clean:
+ [ -n "$(MIRRORDIR)" ] && [ $(MIRRORDIR) != "." ] && [ $(MIRRORDIR) != "/" ] && [ $(MIRRORDIR) != ".." ] && [ -d ./$(MIRRORDIR) ] && rm -rf ./$(MIRRORDIR)/*
+ [ -n "$(TMPDIR)" ] && [ $(TMPDIR) != "." ] && [ $(TMPDIR) != "/" ] && [ $(TMPDIR) != ".." ] && [ -d ./$(TMPDIR) ] && rm -rf ./$(TMPDIR)/*
+ [ -n "$(UPDATEREPODIR)" ] && [ $(UPDATEREPODIR) != "." ] && [ $(UPDATEREPODIR) != "/" ] && [ $(UPDATEREPODIR) != ".." ] && [ -d ./$(UPDATEREPODIR) ] && rm -rf ./$(UPDATEREPODIR)/*
+ rm -f $(CLEANFILES)
+
+.PHONY: help
+help:
+ @echo "$$data"
+
+define data
+Usage: [IMAGE=(odklite|odkfull)] [ODK_DEBUG=yes] sh run.sh make [(IMP|MIR|IMP_LARGE|PAT)=(false|true)] command
+
+----------------------------------------
+ Command reference
+----------------------------------------
+
+Core commands:
+* prepare_release: Run the entire release pipeline. Use make IMP=false prepare_release to avoid rerunning the imports
+* prepare_release_fast: Run the entire release pipeline without refreshing imports, recreating components or recompiling patterns.
+* update_repo: Update the ODK repository setup using the config file vivo-odk.yaml
+* test: Running all validation tests
+* odkversion: Show the current version of the ODK Makefile and ROBOT.
+* clean: Delete all temporary files
+* help: Print ODK Usage information
+* public_release: Uploads the release file to a release management system, such as GitHub releases. Must be configured.
+
+
+Imports management:
+* refresh-imports: Refresh all imports and mirrors.
+* recreate-components: Recreate all components.
+* no-mirror-refresh-imports: Refresh all imports without downloading mirrors.
+* refresh-imports-excluding-large: Refresh all imports and mirrors, but skipping the ones labelled as 'is_large'.
+* refresh-%: Refresh a single import, i.e. refresh-go will refresh 'imports/go_import.owl'.
+* no-mirror-refresh-%: Refresh a single import without updating the mirror, i.e. refresh-go will refresh 'imports/go_import.owl'.
+* mirror-%: Refresh a single mirror.
+
+Editor utilities:
+* validate_idranges: Make sure your ID ranges file is formatted correctly
+* normalize_src: Load and save your vivo-edit file to make sure it is serialised correctly
+* explain_unsat: If you have unsatisfiable classes, this command will create a markdown file (tmp/explain_unsat.md) which will explain all your unsatisfiable classes
+* validate-all-tsv: Check all your tsv files for possible problems in syntax. Use ALL_TSV_FILES variable to list files
+* validate-tsv: Check a tsv file for syntactic problems with tsvalid. Use TSV variable to pass filepath, e.g. make TSV=../my.tsv validate-tsv.
+* release_diff: Create a diff between the current release and the new release
+
+Additional build commands (advanced users)
+* all: Run the entire pipeline (like prepare_release), but without copying the release files to the release directory.
+* all_subsets: Build all subsets
+* custom_reports: Generate all custom sparql reports you have configured in your vivo-odk.yaml file.
+* all_assets: Build all assets
+* show_assets: Print a list of all assets that would be built by the release pipeline
+* all_mappings: Update all SSSOM mapping sets
+
+Additional QC commands (advanced users)
+* robot_reports: Run all configured ROBOT reports
+* validate_profile_%: Run an OWL2 DL profile validation check, for example validate_profile_vivo-edit.owl.
+* reason_test: Run a basic reasoning test
+
+Examples:
+* sh run.sh make IMP=false prepare_release
+* sh run.sh make update_repo
+* sh run.sh make test
+
+Tricks:
+* Add -B to the end of your command to force re-running it even if nothing has changed
+* Use the IMAGE parameter to the run.sh script to use a different image like odklite
+* Use ODK_DEBUG=yes sh run.sh make ... to print information about timing and debugging
+
+endef
+export data
+
+include vivo.Makefile
\ No newline at end of file
diff --git a/src/ontology/README-editors.md b/src/ontology/README-editors.md
new file mode 100644
index 0000000..7e61bcb
--- /dev/null
+++ b/src/ontology/README-editors.md
@@ -0,0 +1,27 @@
+These notes are for the EDITORS of vivo
+
+This project was created using the [ontology development kit](https://github.com/INCATools/ontology-development-kit). See the site for details.
+
+For more details on ontology management, please see the
+[OBO Academy Tutorials](https://oboacademy.github.io/obook/), the
+[OBO tutorial](https://github.com/jamesaoverton/obo-tutorial) or the [Gene Ontology Editors Tutorial](https://go-protege-tutorial.readthedocs.io/en/latest/)
+
+This documentation has been superseded by the ODK automatic documentation, which you can
+activate by adding:
+
+```
+documentation:
+ documentation_system: mkdocs
+```
+
+to your Makefile and running:
+
+```
+sh run.sh make update_repo
+```
+(Unix)
+
+```
+run.bat make update_repo
+```
+(Windows)
\ No newline at end of file
diff --git a/src/ontology/catalog-v001.xml b/src/ontology/catalog-v001.xml
new file mode 100644
index 0000000..7e40cd9
--- /dev/null
+++ b/src/ontology/catalog-v001.xml
@@ -0,0 +1,26 @@
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
\ No newline at end of file
diff --git a/src/ontology/imports/bfo_import.owl b/src/ontology/imports/bfo_import.owl
new file mode 100644
index 0000000..d3e3fa9
--- /dev/null
+++ b/src/ontology/imports/bfo_import.owl
@@ -0,0 +1,752 @@
+Prefix(:=)
+Prefix(owl:=)
+Prefix(rdf:=)
+Prefix(xml:=)
+Prefix(xsd:=)
+Prefix(rdfs:=)
+
+
+Ontology(
+
+Annotation()
+Annotation(owl:versionInfo "2024-02-21")
+
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(Class())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty(rdfs:isDefinedBy))
+Declaration(AnnotationProperty(rdfs:seeAlso))
+Declaration(AnnotationProperty())
+Declaration(AnnotationProperty())
+############################
+# Annotation Properties
+############################
+
+# Annotation Property: (BFO OWL specification label)
+
+AnnotationAssertion( "Relates an entity in the ontology to the name of the variable that is used to represent it in the code that generates the BFO OWL file from the lispy specification."@en)
+AnnotationAssertion( "Really of interest to developers only"@en)
+AnnotationAssertion(rdfs:label "BFO OWL specification label"@en)
+SubAnnotationPropertyOf( rdfs:label)
+
+# Annotation Property: (BFO CLIF specification label)
+
+AnnotationAssertion( "Relates an entity in the ontology to the term that is used to represent it in the the CLIF specification of BFO2"@en)
+AnnotationAssertion( "Person:Alan Ruttenberg")
+AnnotationAssertion( "Really of interest to developers only"@en)
+AnnotationAssertion(rdfs:label "BFO CLIF specification label"@en)
+SubAnnotationPropertyOf( rdfs:label)
+
+# Annotation Property: (editor preferred term)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "editor preferred term"@en)
+
+# Annotation Property: (example of usage)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "example of usage"@en)
+
+# Annotation Property: (definition)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "definition"@en)
+
+# Annotation Property: (editor note)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "editor note"@en)
+
+# Annotation Property: (term editor)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "term editor"@en)
+
+# Annotation Property: (alternative term)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "alternative term"@en)
+
+# Annotation Property: (definition source)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "definition source"@en)
+
+# Annotation Property: (curator note)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "curator note"@en)
+
+# Annotation Property: (imported from)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "imported from"@en)
+
+# Annotation Property: (elucidation)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "elucidation"@en)
+
+# Annotation Property: (has associated axiom(nl))
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "has associated axiom(nl)"@en)
+
+# Annotation Property: (has associated axiom(fol))
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "has associated axiom(fol)"@en)
+
+# Annotation Property: (has axiom label)
+
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "has axiom label"@en)
+
+
+
+############################
+# Classes
+############################
+
+# Class: (entity)
+
+AnnotationAssertion( "entity")
+AnnotationAssertion( "Entity")
+AnnotationAssertion( "Julius Caesar"@en)
+AnnotationAssertion( "Verdi’s Requiem"@en)
+AnnotationAssertion( "the Second World War"@en)
+AnnotationAssertion( "your body mass index"@en)
+AnnotationAssertion( "BFO 2 Reference: In all areas of empirical inquiry we encounter general terms of two sorts. First are general terms which refer to universals or types:animaltuberculosissurgical procedurediseaseSecond, are general terms used to refer to groups of entities which instantiate a given universal but do not correspond to the extension of any subuniversal of that universal because there is nothing intrinsic to the entities in question by virtue of which they – and only they – are counted as belonging to the given group. Examples are: animal purchased by the Emperortuberculosis diagnosed on a Wednesdaysurgical procedure performed on a patient from Stockholmperson identified as candidate for clinical trial #2056-555person who is signatory of Form 656-PPVpainting by Leonardo da VinciSuch terms, which represent what are called ‘specializations’ in [81"@en)
+AnnotationAssertion(Annotation() Annotation(rdfs:comment "per discussion with Barry Smith") Annotation(rdfs:seeAlso ) "Entity doesn't have a closure axiom because the subclasses don't necessarily exhaust all possibilites. For example Werner Ceusters 'portions of reality' include 4 sorts, entities (as BFO construes them), universals, configurations, and relations. It is an open question as to whether entities as construed in BFO will at some point also include these other portions of reality. See, for example, 'How to track absolutely everything' at http://www.referent-tracking.com/_RTU/papers/CeustersICbookRevised.pdf"@en)
+AnnotationAssertion(Annotation() "An entity is anything that exists or has existed or will exist. (axiom label in BFO2 Reference: [001-001])"@en)
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "entity"@en)
+SubClassOf( owl:Thing)
+
+# Class: (continuant)
+
+AnnotationAssertion( "continuant")
+AnnotationAssertion( "Continuant")
+AnnotationAssertion( "BFO 2 Reference: Continuant entities are entities which can be sliced to yield parts only along the spatial dimension, yielding for example the parts of your table which we call its legs, its top, its nails. ‘My desk stretches from the window to the door. It has spatial parts, and can be sliced (in space) in two. With respect to time, however, a thing is a continuant.’ [60, p. 240"@en)
+AnnotationAssertion(Annotation() "Continuant doesn't have a closure axiom because the subclasses don't necessarily exhaust all possibilites. For example, in an expansion involving bringing in some of Ceuster's other portions of reality, questions are raised as to whether universals are continuants"@en)
+AnnotationAssertion(Annotation() "A continuant is an entity that persists, endures, or continues to exist through time while maintaining its identity. (axiom label in BFO2 Reference: [008-002])"@en)
+AnnotationAssertion(Annotation() "if b is a continuant and if, for some t, c has_continuant_part b at t, then c is a continuant. (axiom label in BFO2 Reference: [126-001])"@en)
+AnnotationAssertion(Annotation() "if b is a continuant and if, for some t, c is continuant_part of b at t, then c is a continuant. (axiom label in BFO2 Reference: [009-002])"@en)
+AnnotationAssertion(Annotation() "if b is a material entity, then there is some temporal interval (referred to below as a one-dimensional temporal region) during which b exists. (axiom label in BFO2 Reference: [011-002])"@en)
+AnnotationAssertion(Annotation() "(forall (x y) (if (and (Continuant x) (exists (t) (continuantPartOfAt y x t))) (Continuant y))) // axiom label in BFO2 CLIF: [009-002] ")
+AnnotationAssertion(Annotation() "(forall (x y) (if (and (Continuant x) (exists (t) (hasContinuantPartOfAt y x t))) (Continuant y))) // axiom label in BFO2 CLIF: [126-001] ")
+AnnotationAssertion(Annotation() "(forall (x) (if (Continuant x) (Entity x))) // axiom label in BFO2 CLIF: [008-002] ")
+AnnotationAssertion(Annotation() "(forall (x) (if (Material Entity x) (exists (t) (and (TemporalRegion t) (existsAt x t))))) // axiom label in BFO2 CLIF: [011-002] ")
+AnnotationAssertion(rdfs:isDefinedBy )
+AnnotationAssertion(rdfs:label "continuant"@en)
+SubClassOf()
+DisjointClasses()
+
+# Class: (occurrent)
+
+AnnotationAssertion( "occurrent")
+AnnotationAssertion( "Occurrent")
+AnnotationAssertion( "BFO 2 Reference: every occurrent that is not a temporal or spatiotemporal region is s-dependent on some independent continuant that is not a spatial region"@en)
+AnnotationAssertion( "BFO 2 Reference: s-dependence obtains between every process and its participants in the sense that, as a matter of necessity, this process could not have existed unless these or those participants existed also. A process may have a succession of participants at different phases of its unfolding. Thus there may be different players on the field at different times during the course of a football game; but the process which is the entire game s-depends_on all of these players nonetheless. Some temporal parts of this process will s-depend_on on only some of the players."@en)
+AnnotationAssertion(Annotation() Annotation(rdfs:comment "per discussion with Barry Smith") "Occurrent doesn't have a closure axiom because the subclasses don't necessarily exhaust all possibilites. An example would be the sum of a process and the process boundary of another process."@en)
+AnnotationAssertion(Annotation() "Simons uses different terminology for relations of occurrents to regions: Denote the spatio-temporal location of a given occurrent e by 'spn[e]' and call this region its span. We may say an occurrent is at its span, in any larger region, and covers any smaller region. Now suppose we have fixed a frame of reference so that we can speak not merely of spatio-temporal but also of spatial regions (places) and temporal regions (times). The spread of an occurrent, (relative to a frame of reference) is the space it exactly occupies, and its spell is likewise the time it exactly occupies. We write 'spr[e]' and `spl[e]' respectively for the spread and spell of e, omitting mention of the frame.")
+AnnotationAssertion(Annotation() "An occurrent is an entity that unfolds itself in time or it is the instantaneous boundary of such an entity (for example a beginning or an ending) or it is a temporal or spatiotemporal region which such an entity occupies_temporal_region or occupies_spatiotemporal_region. (axiom label in BFO2 Reference: [077-002])"@en)
+AnnotationAssertion(Annotation() "Every occurrent occupies_spatiotemporal_region some spatiotemporal region. (axiom label in BFO2 Reference: [108-001])"@en)
+AnnotationAssertion(Annotation() "b is an occurrent entity iff b is an entity that has temporal parts. (axiom label in BFO2 Reference: [079-001])"@en)
+AnnotationAssertion(Annotation() "(forall (x) (if (Occurrent x) (exists (r) (and (SpatioTemporalRegion r) (occupiesSpatioTemporalRegion x r))))) // axiom label in BFO2 CLIF: [108-001] ")
+AnnotationAssertion(Annotation() "(forall (x) (iff (Occurrent x) (and (Entity x) (exists (y) (temporalPartOf y x))))) // axiom label in BFO2 CLIF: [079-001] ")
+AnnotationAssertion(rdfs:isDefinedBy