Skip to content

Commit

Permalink
chore(docs): check formatting of docs in ci and do initial formatting (#3734)
Browse files Browse the repository at this point in the history

* chore(ci): check formatting of docs in ci

Closes #2419

* Reduce diff

* Format

* Make prettier respect the .gitignore file as well

* make md and mdx files indent at 2

* format after rebase (main was unformatted)

* Update docs/package.json

Co-authored-by: Cornelius Roemer <[email protected]>

---------

Co-authored-by: Felix Hennig <[email protected]>
Co-authored-by: Felix Hennig <[email protected]>
  • Loading branch information
3 people authored Feb 26, 2025
1 parent 592f128 commit 38458a8
Show file tree
Hide file tree
Showing 16 changed files with 270 additions and 253 deletions.
3 changes: 3 additions & 0 deletions .github/workflows/docs-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -31,3 +31,6 @@ jobs:
- name: Check Build
run: npm run build
working-directory: ./docs
- name: Check Format
run: npm run check-format
working-directory: ./docs
6 changes: 6 additions & 0 deletions docs/.prettierrc
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,12 @@
"options": {
"parser": "astro"
}
},
{
"files": ["*.md", "*.mdx"],
"options": {
"tabWidth": 2
}
}
]
}
2 changes: 2 additions & 0 deletions docs/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -34,6 +34,8 @@ All commands are run from the root of the project, from a terminal:
| :------------------------ | :----------------------------------------------- |
| `npm install` | Installs dependencies |
| `npm run dev` | Starts local dev server at `localhost:4321` |
| `npm run format` | Formats your code using Prettier |
| `npm run check-format` | Checks if your code is formatted correctly |
| `npm run build` | Build your production site to `./dist/` |
| `npm run preview` | Preview your build locally, before deploying |
| `npm run astro ...` | Run CLI commands like `astro add`, `astro check` |
Expand Down
4 changes: 2 additions & 2 deletions docs/astro.config.mjs
Original file line number Diff line number Diff line change
Expand Up @@ -70,8 +70,8 @@ export default defineConfig({
items: [
'for-administrators/pipeline-concept',
'for-administrators/existing-preprocessing-pipelines',
'for-administrators/build-new-preprocessing-pipeline'
]
'for-administrators/build-new-preprocessing-pipeline',
],
},
{ label: 'Data use terms', link: '/for-administrators/data-use-terms/' },
{ label: 'User administration', link: '/for-administrators/user-administration/' },
Expand Down
3 changes: 2 additions & 1 deletion docs/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,8 @@
"build": "astro check && astro build",
"preview": "astro preview",
"astro": "astro",
"format": "prettier --ignore-path \"../.gitignore\" --write \"./**/*.{ts,tsx,json,astro,md,mdx,mjs,cjs}\""
"check-format": "prettier --ignore-path \"../.gitignore\" --ignore-path .gitignore --check \"./**/*.{ts,tsx,json,astro,md,mdx,mjs,cjs}\"",
"format": "prettier --ignore-path \"../.gitignore\" --ignore-path .gitignore --write \"./**/*.{ts,tsx,json,astro,md,mdx,mjs,cjs}\""
},
"dependencies": {
"@astrojs/check": "^0.9.4",
Expand Down
9 changes: 3 additions & 6 deletions docs/src/components/MarkdownRenderer.astro
Original file line number Diff line number Diff line change
Expand Up @@ -9,10 +9,7 @@ import rehypeStringify from 'rehype-stringify';
const { content } = Astro.props;
const html = String(await unified()
.use(remarkParse)
.use(remarkRehype)
.use(rehypeStringify)
.process(content));
const html = String(await unified().use(remarkParse).use(remarkRehype).use(rehypeStringify).process(content));
---
<div set:html={html}></div>

<div set:html={html} />
72 changes: 38 additions & 34 deletions docs/src/components/SchemaDocs.astro
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,12 @@
* Properties in the schema can be assigned to groups by adding a new "groups": ["group1", "group2"] key to them.
* This way, you can split schema definition into semantic groups.
*/
import { type JSONSchema7, type JSONSchema7Definition, type JSONSchema7TypeName, type JSONSchema7Type } from 'json-schema';
import {
type JSONSchema7,
type JSONSchema7Definition,
type JSONSchema7TypeName,
type JSONSchema7Type,
} from 'json-schema';
import MarkdownRenderer from './MarkdownRenderer.astro';
import rawSchema from '../values.schema.json';
Expand All @@ -26,24 +31,24 @@ function capitalizeFirst(str: string) {
*/
function typeToString(
type: JSONSchema7TypeName | JSONSchema7TypeName[] | undefined,
enumvals: JSONSchema7Type[] | undefined
enumvals: JSONSchema7Type[] | undefined,
) {
if (type === undefined) return "";
if (type === undefined) return '';
if (Array.isArray(type)) {
return type.map(t => capitalizeFirst(String(t))).join(", ")
return type.map((t) => capitalizeFirst(String(t))).join(', ');
}
if (type === "string" && enumvals !== undefined) {
return enumvals?.map(enumval => String(enumval)).join(", ")
if (type === 'string' && enumvals !== undefined) {
return enumvals?.map((enumval) => String(enumval)).join(', ');
}
return capitalizeFirst(String(type));
}
/** A row in the table. */
interface Row {
key: string,
type?: string,
default?: string,
description?: string
key: string;
type?: string;
default?: string;
description?: string;
}
const rows: Row[] = [];
Expand All @@ -55,44 +60,40 @@ const rows: Row[] = [];
* @param definition The definition of the property currently observed.
*/
function addSelfAndChildren(prefix: string, key: string, definition: JSONSchema7Definition) {
if (
typeof definition === 'object' &&
definition !== null
) {
if (typeof definition === 'object' && definition !== null) {
if ('placeholder' in definition) {
key = `<${definition.placeholder}>`;
}
if ('docsIncludePrefix' in definition && definition.docsIncludePrefix === false) {
prefix = "";
prefix = '';
}
if (
'groups' in definition &&
Array.isArray(definition.groups) &&
definition.groups.includes(group)
) {
var def = definition.default !== undefined ? String(definition.default) : "";
if (definition.type === "string" && def !== "") {
def = `"${def}"`
if ('groups' in definition && Array.isArray(definition.groups) && definition.groups.includes(group)) {
var def = definition.default !== undefined ? String(definition.default) : '';
if (definition.type === 'string' && def !== '') {
def = `"${def}"`;
}
rows.push({
key: `${prefix}${key}`,
type: typeToString(definition.type, definition.enum),
default: def,
description: definition.description
})
description: definition.description,
});
}
if ('properties' in definition && definition.properties) {
Object.entries(definition.properties).forEach(([k, d]) => addSelfAndChildren(`${prefix}${key}.`, k, d));
}
if ('patternProperties' in definition && definition.patternProperties) {
Object.entries(definition.patternProperties).forEach(([k, d]) => addSelfAndChildren(`${prefix}${key}.`, k, d));
Object.entries(definition.patternProperties).forEach(([k, d]) =>
addSelfAndChildren(`${prefix}${key}.`, k, d),
);
}
if ('items' in definition && definition.items !== undefined && typeof definition.items === 'object') {
const items = definition.items;
if ('length' in items) { // filter out arrays
if ('length' in items) {
// filter out arrays
return;
}
addSelfAndChildren(`${prefix}${key}.`, "[]", items)
addSelfAndChildren(`${prefix}${key}.`, '[]', items);
}
}
}
Expand All @@ -101,7 +102,7 @@ if (schema.definitions) {
Object.entries(schema.definitions).forEach(([_, definition]) => {
if (typeof definition === 'object' && definition.properties) {
Object.entries(definition.properties).forEach(([key, definition]) => {
addSelfAndChildren("", key, definition);
addSelfAndChildren('', key, definition);
});
}
});
Expand All @@ -110,10 +111,9 @@ if (schema.definitions) {
// start of recursing with the top level properties in the schema.
if (schema.properties) {
Object.entries(schema.properties).forEach(([key, definition]) => {
addSelfAndChildren("", key, definition);
addSelfAndChildren('', key, definition);
});
}
---

<div class='overflow-x-scroll'>
Expand All @@ -128,12 +128,16 @@ if (schema.properties) {
</thead>
<tbody>
{
rows.map(row => (
rows.map((row) => (
<tr>
<td><code>{row.key}</code></td>
<td>
<code>{row.key}</code>
</td>
<td>{row.type}</td>
<td>{row.default}</td>
<td><MarkdownRenderer content={row.description} /></td>
<td>
<MarkdownRenderer content={row.description} />
</td>
</tr>
))
}
Expand Down
6 changes: 5 additions & 1 deletion docs/src/content/docs/for-administrators/data-use-terms.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,11 @@ description: What's the data use terms concept of Loculus and how to configure i

import { Aside } from '@astrojs/starlight/components';

<Aside type="caution">The data use terms currently hardcode textual descriptions and behaviour designed around the Pathoplexus database. You can disable the data use terms entirely, as below. If you require custom data use terms handling please feel free to reach out to us by raising a Github issue.</Aside>
<Aside type='caution'>
The data use terms currently hardcode textual descriptions and behaviour designed around the Pathoplexus database. You
can disable the data use terms entirely, as below. If you require custom data use terms handling please feel free to
reach out to us by raising a Github issue.
</Aside>

Loculus comes with built-in handling of data use terms for submitted data, which means that data can either be _open_ or _restricted_. You can define, what restricted means yourself. Users can submit data as restricted, but they have to give a date at which point the sequences become open, this date can at most be one year from the submission date.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,8 +16,8 @@ Given a nextclade dataset this pipeline uses [nextclade run](https://docs.nextst

```yaml
preprocessing:
- configFile:
nextclade_dataset_name: nextstrain/mpox/all-clades
- configFile:
nextclade_dataset_name: nextstrain/mpox/all-clades
```
Additionally the pipeline performs checks on the metadata fields. The checks are defined by custom preprocessing functions in the `values.yaml` file. These checks can be applied to and customized for other metadata fields, see [Preprocessing Checks](https://github.com/loculus-project/loculus/blob/main/preprocessing/nextclade/README.md#preprocessing-checks) for more info.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,4 +25,4 @@ While the exact functionality depends on the specific pipeline, generally a pipe

As the preprocessing logic might change over time, preprocessing pipelines are versioned (You specify the pipeline version under `<organismConfig>.preprocessing.version`).
The backend keeps track of which sequences have successfully been processed with which pipeline version.
Once all data for an organism has successfully been processed with a new version, that version will also automatically be served to users.
Once all data for an organism has successfully been processed with a new version, that version will also automatically be served to users.
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@ sequenceFlagging:
github:
organization: pathoplexus
repository: curation_reports
issueTemplate: sequence-metadata-issue.md # (optional)
issueTemplate: sequence-metadata-issue.md # (optional)
```
Sequence reporting is done through GitHub issues.
Expand Down
118 changes: 59 additions & 59 deletions docs/src/content/docs/for-administrators/setup-with-k3d-and-nginx.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -98,62 +98,62 @@ runDevelopmentMainDatabase: true
runDevelopmentKeycloakDatabase: true

website:
runtimeConfig:
public:
backendUrl: 'https://api.<your domain>/backend'
lapisUrlTemplate: 'https://api.<your domain>/%organism%'
keycloakUrl: 'https://auth.<your domain>'
runtimeConfig:
public:
backendUrl: 'https://api.<your domain>/backend'
lapisUrlTemplate: 'https://api.<your domain>/%organism%'
keycloakUrl: 'https://auth.<your domain>'

auth:
verifyEmail: false
resetPasswordAllowed: false
registrationAllowed: true
smtp: null
identityProviders: null
verifyEmail: false
resetPasswordAllowed: false
registrationAllowed: true
smtp: null
identityProviders: null

disableIngest: true
disableEnaSubmission: true

seqSets:
enabled: false
enabled: false

secrets:
keycloak-admin:
type: raw
data:
initialAdminPassword: '<your initial admin password>'
keycloak-admin:
type: raw
data:
initialAdminPassword: '<your initial admin password>'

organisms:
angelovirus:
schema:
organismName: 'Angelovirus'
metadata:
- name: country
type: string
initiallyVisible: true
- name: city
type: string
initiallyVisible: true
website:
tableColumns:
- country
- city
defaultOrder: descending
defaultOrderBy: country
preprocessing:
- version: 1
image: ghcr.io/loculus-project/preprocessing-nextclade
args:
- 'prepro'
configFile:
log_level: DEBUG
genes: []
batch_size: 100
referenceGenomes:
nucleotideSequences:
- name: 'main'
sequence: 'NNN' # We are not performing alignment here, so this sequence doesn't matter
genes: []
angelovirus:
schema:
organismName: 'Angelovirus'
metadata:
- name: country
type: string
initiallyVisible: true
- name: city
type: string
initiallyVisible: true
website:
tableColumns:
- country
- city
defaultOrder: descending
defaultOrderBy: country
preprocessing:
- version: 1
image: ghcr.io/loculus-project/preprocessing-nextclade
args:
- 'prepro'
configFile:
log_level: DEBUG
genes: []
batch_size: 100
referenceGenomes:
nucleotideSequences:
- name: 'main'
sequence: 'NNN' # We are not performing alignment here, so this sequence doesn't matter
genes: []
```
## Step 4: Install and deploy
Expand Down Expand Up @@ -300,20 +300,20 @@ Then, adapt `my-values.yaml` and add under the `secrets` key:

```yaml
secrets:
database:
type: raw
data:
url: 'jdbc:postgresql://<Loculus database host>:<Loculus database port>/<Loculus database name>'
username: '<Loculus database user>'
password: '<Loculus database password>'
keycloak-database:
type: raw
data:
addr: '<Keycloak database host>'
port: '<Keycloak database port>'
database: '<Keycloak database name>'
username: '<Keycloak database user>'
password: '<Keycloak database password>'
database:
type: raw
data:
url: 'jdbc:postgresql://<Loculus database host>:<Loculus database port>/<Loculus database name>'
username: '<Loculus database user>'
password: '<Loculus database password>'
keycloak-database:
type: raw
data:
addr: '<Keycloak database host>'
port: '<Keycloak database port>'
database: '<Keycloak database name>'
username: '<Keycloak database user>'
password: '<Keycloak database password>'
```
To reinstall Loculus, execute:
Expand Down
Loading

0 comments on commit 38458a8

Please sign in to comment.