diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml index 700014a..43b7a3c 100644 --- a/.devcontainer/docker-compose.yml +++ b/.devcontainer/docker-compose.yml @@ -1,10 +1,6 @@ -version: '3.8' - services: couchdb: - build: - context: .. - dockerfile: Dockerfile + image: couchdb:3.1.1 environment: - COUCHDB_USER=admin - COUCHDB_PASSWORD=secret diff --git a/.github/workflows/docker-publish.yml b/.github/workflows/docker-publish.yml new file mode 100644 index 0000000..58e02cc --- /dev/null +++ b/.github/workflows/docker-publish.yml @@ -0,0 +1,78 @@ +name: Build and Publish Docker Image + +on: + push: + branches: + - main + paths: + - 'Dockerfile' + - 'seed/**' + - 'scripts/**' + - '.github/workflows/docker-publish.yml' + pull_request: + branches: + - main + workflow_dispatch: # Allow manual trigger + release: + types: [published] + +env: + REGISTRY: ghcr.io + IMAGE_NAME: ${{ github.repository }} + +jobs: + build-and-push: + runs-on: ubuntu-latest + permissions: + contents: read + packages: write + attestations: write + id-token: write + + steps: + - name: Checkout repository + uses: actions/checkout@v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + + - name: Log in to Container Registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + with: + registry: ${{ env.REGISTRY }} + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Extract metadata (tags, labels) + id: meta + uses: docker/metadata-action@v5 + with: + images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + tags: | + type=ref,event=branch + type=ref,event=pr + type=sha,prefix= + type=raw,value=latest,enable={{is_default_branch}} + type=semver,pattern={{version}} + type=semver,pattern={{major}}.{{minor}} + + - name: Build and push Docker image + id: build-and-push + uses: docker/build-push-action@v6 + with: + context: .
+ push: ${{ github.event_name != 'pull_request' }} + tags: ${{ steps.meta.outputs.tags }} + labels: ${{ steps.meta.outputs.labels }} + cache-from: type=gha + cache-to: type=gha,mode=max + platforms: linux/amd64,linux/arm64 + + - name: Generate artifact attestation + if: github.event_name != 'pull_request' + uses: actions/attest-build-provenance@v2 + with: + subject-name: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }} + subject-digest: ${{ steps.build-and-push.outputs.digest }} + push-to-registry: true diff --git a/.gitignore b/.gitignore index b7faf40..89e7a05 100644 --- a/.gitignore +++ b/.gitignore @@ -1,139 +1,3 @@ -# Byte-compiled / optimized / DLL files -__pycache__/ -*.py[codz] -*$py.class - -# C extensions -*.so - -# Distribution / packaging -.Python -build/ -develop-eggs/ -dist/ -downloads/ -eggs/ -.eggs/ -lib/ -lib64/ -parts/ -sdist/ -var/ -wheels/ -share/python-wheels/ -*.egg-info/ -.installed.cfg -*.egg -MANIFEST - -# PyInstaller -# Usually these files are written by a python script from a template -# before PyInstaller builds the exe, so as to inject date/other infos into it. 
-*.manifest -*.spec - -# Installer logs -pip-log.txt -pip-delete-this-directory.txt - -# Unit test / coverage reports -htmlcov/ -.tox/ -.nox/ -.coverage -.coverage.* -.cache -nosetests.xml -coverage.xml -*.cover -*.py.cover -.hypothesis/ -.pytest_cache/ -cover/ - -# Translations -*.mo -*.pot - -# Django stuff: -*.log -local_settings.py -db.sqlite3 -db.sqlite3-journal - -# Flask stuff: -instance/ -.webassets-cache - -# Scrapy stuff: -.scrapy - -# Sphinx documentation -docs/_build/ - -# PyBuilder -.pybuilder/ -target/ - -# Jupyter Notebook -.ipynb_checkpoints - -# IPython -profile_default/ -ipython_config.py - -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# UV -# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -#uv.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock -#poetry.toml - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -# pdm recommends including project-wide configuration in pdm.toml, but excluding .pdm-python. 
-# https://pdm-project.org/en/latest/usage/project/#working-with-version-control -#pdm.lock -#pdm.toml -.pdm-python -.pdm-build/ - -# pixi -# Similar to Pipfile.lock, it is generally recommended to include pixi.lock in version control. -#pixi.lock -# Pixi creates a virtual environment in the .pixi directory, just like venv module creates one -# in the .venv directory. It is recommended not to include this directory in version control. -.pixi - -# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm -__pypackages__/ - -# Celery stuff -celerybeat-schedule -celerybeat.pid - -# SageMath parsed files -*.sage.py - # Environments .env .envrc @@ -144,64 +8,4 @@ ENV/ env.bak/ venv.bak/ -# Spyder project settings -.spyderproject -.spyproject - -# Rope project settings -.ropeproject - -# mkdocs documentation -/site - -# mypy -.mypy_cache/ -.dmypy.json -dmypy.json - -# Pyre type checker -.pyre/ - -# pytype static type analyzer -.pytype/ - -# Cython debug symbols -cython_debug/ - -# PyCharm -# JetBrains specific template is maintained in a separate JetBrains.gitignore that can -# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore -# and can be added to the global gitignore or merged into this file. For a more nuclear -# option (not recommended) you can uncomment the following to ignore the entire idea folder. -#.idea/ - -# Abstra -# Abstra is an AI-powered process automation framework. -# Ignore directories containing user credentials, local state, and settings. -# Learn more at https://abstra.io/docs -.abstra/ - -# Visual Studio Code -# Visual Studio Code specific template is maintained in a separate VisualStudioCode.gitignore -# that can be found at https://github.com/github/gitignore/blob/main/Global/VisualStudioCode.gitignore -# and can be added to the global gitignore or merged into this file. 
However, if you prefer, - you could uncomment the following to ignore the entire vscode folder - # .vscode/ - -# Ruff stuff: -.ruff_cache/ - -# PyPI configuration file -.pypirc - -# Cursor -# Cursor is an AI-powered code editor. `.cursorignore` specifies files/directories to -# exclude from AI features like autocomplete and code analysis. Recommended for sensitive data -# refer to https://docs.cursor.com/context/ignore-files -.cursorignore -.cursorindexingignore - -# Marimo -marimo/_static/ -marimo/_lsp/ -__marimo__/ +.DS_Store diff --git a/Dockerfile b/Dockerfile index d879ce1..c694abb 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,20 +1,21 @@ -# Use official CouchDB 3.1.1 image -FROM couchdb:3.1.1 +FROM couchdb:3.4 -# Do not include credentials in the image. Configure admin credentials at runtime -# via environment variables: COUCHDB_USER and COUCHDB_PASSWORD (or using -# COUCHDB_USER and COUCHDB_PASSWORD with the image's supported env vars). +# Install curl (healthchecks, data loading) and jq (JSON parsing in seed scripts) +RUN apt-get update && apt-get install -y --no-install-recommends \ + curl jq \ + && rm -rf /var/lib/apt/lists/* -# Create a directory for seeded data (optional mount point) -VOLUME ["/opt/couchdb/data", "/opt/couchdb/seed"] +# Copy seed data and initialization scripts +COPY seed/ /opt/couchdb/seed/ +COPY scripts/ /opt/couchdb/scripts/ -# Expose CouchDB default port -EXPOSE 5984 +# Make scripts executable +RUN chmod +x /opt/couchdb/scripts/*.sh -# Metadata -LABEL org.opencontainers.image.title="StatusDB_seed_data-couchdb" -LABEL org.opencontainers.image.description="CouchDB 3.1.1 image used for seeding StatusDB data. Configure admin credentials at runtime; do not bake secrets into the image."
+# The base image already sets up CouchDB to run +# We use a custom entrypoint wrapper to initialize data on first boot +COPY scripts/docker-entrypoint-wrapper.sh /docker-entrypoint-wrapper.sh +RUN chmod +x /docker-entrypoint-wrapper.sh -# Default command from the official image is fine; we don't override it so -# the image behaves like the official CouchDB image. This Dockerfile exists -# mainly to provide a named image in the project and a mount point for seeds. +ENTRYPOINT ["tini", "--", "/docker-entrypoint-wrapper.sh"] +CMD ["/opt/couchdb/bin/couchdb"] diff --git a/README.md b/README.md index b20939e..477ebff 100644 --- a/README.md +++ b/README.md @@ -1,77 +1,98 @@ +# StatusDB Seed Data -# StatusDB_seed_data +Holds test data to fire up a dev instance of StatusDB (CouchDB). -Holds test data to fire up a dev instance of statusdb +## Docker Image -## Development Container (Recommended) +A pre-built Docker image with CouchDB and seed data is available: -The easiest way to get started is using the VS Code Dev Container: +```bash +docker pull ghcr.io/scilifelab/statusdb_seed_data:latest +``` -1. Install [VS Code](https://code.visualstudio.com/) and the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) -2. Open this repository in VS Code -3. When prompted, click "Reopen in Container" (or run `Dev Containers: Reopen in Container` from the command palette) -4. VS Code will build and start CouchDB automatically +### Running the Image + +```bash +docker run -d \ + -p 5984:5984 \ + -e COUCHDB_USER=admin \ + -e COUCHDB_PASSWORD=admin \ + ghcr.io/scilifelab/statusdb_seed_data:latest +``` CouchDB will be available at: - API: - Fauxton UI: -- Credentials: `admin` / `secret` +- Credentials: `admin` / `admin` -## Docker +The seed data is automatically loaded on first startup. 
-This repository contains a small `Dockerfile` that is a thin wrapper around the -official CouchDB 3.1.1 image and provides a mount point for seed data. +### Persisting Data -Build the image (from the repo root): +To persist data between container restarts: ```bash -docker build -t statusdb-seed-couchdb:3.1.1 . +docker run -d \ + -p 5984:5984 \ + -e COUCHDB_USER=admin \ + -e COUCHDB_PASSWORD=admin \ + -v couchdb-data:/opt/couchdb/data \ + ghcr.io/scilifelab/statusdb_seed_data:latest ``` -Run CouchDB with an admin user and mounted seed data directory: +## Development Container (VS Code) -```bash -# Start container with admin credentials and map port 5984 -docker run --rm -p 5984:5984 \ - -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=secret \ - -v "$PWD/seed:/opt/couchdb/seed" \ - statusdb-seed-couchdb:3.1.1 -``` +The easiest way to develop seed data is using the VS Code Dev Container: + +1. Install [VS Code](https://code.visualstudio.com/) and the [Dev Containers extension](https://marketplace.visualstudio.com/items?itemName=ms-vscode-remote.remote-containers) +2. Open this repository in VS Code +3. When prompted, click "Reopen in Container" (or run `Dev Containers: Reopen in Container` from the command palette) +4. VS Code will build and start CouchDB automatically + +## Seed Data Structure -Notes: +The `seed/` directory contains JSON documents that are loaded into CouchDB on startup. -- Do not commit credentials into the repository. Prefer runtime env vars or Docker - secrets in production. -- The official CouchDB image will initialize the database cluster and create the - admin user when `COUCHDB_USER` and `COUCHDB_PASSWORD` are provided. -- Place any JSON documents or init scripts you want applied at container startup - in the `seed` folder and mount it into `/opt/couchdb/seed`. 
+### Directory Structure -## Seed Data +``` +seed/ +├── / # Creates a database and loads all JSON files into it +│ ├── doc1.json +│ └── doc2.json +└── *.json # Top-level JSON files are loaded into 'statusdb' database +``` -The `seed/` directory contains example JSON documents that can be loaded into CouchDB -to populate a development instance with test data. +### Document Format -### Example Documents +Each JSON file should contain a single CouchDB document. If the document has an `_id` field, it will be used as the document ID. Otherwise, CouchDB will auto-generate an ID. -- **`example_project.json`**: A sample genomics project document with project metadata, - sample information, and sequencing run details. This demonstrates the typical structure - for a StatusDB project entry. +Example document (`seed/example_project.json`): -### Loading Seed Data +```json +{ + "_id": "project_001", + "type": "project", + "name": "Example Genomics Project", + "project_id": "P12345", + ... +} +``` -To load seed data into your running CouchDB instance, you can use the CouchDB HTTP API: +## Building the Image Locally ```bash -# Create a database (if it doesn't exist) -curl -X PUT http://admin:secret@localhost:5984/statusdb - -# Load a document -curl -X POST http://admin:secret@localhost:5984/statusdb \ - -H "Content-Type: application/json" \ - -d @seed/example_project.json +docker build -t statusdb_seed_data . +docker run -p 5984:5984 -e COUCHDB_USER=admin -e COUCHDB_PASSWORD=admin statusdb_seed_data ``` -Alternatively, you can use the CouchDB web interface (Fauxton) at -to manually create databases and upload documents. +## Adding New Seed Data + +1. Add JSON files to the `seed/` directory (or subdirectories for specific databases) +2. Commit and push to `main` branch +3. 
GitHub Actions will automatically build and publish a new image + +## Using with Genomics Status + +The [genomics-status](https://github.com/SciLifeLab/genomics-status) repository is configured to use this image in its dev container setup. When you open genomics-status in VS Code with Dev Containers, it will automatically pull this image and start CouchDB with the seed data. diff --git a/scripts/docker-entrypoint-wrapper.sh b/scripts/docker-entrypoint-wrapper.sh new file mode 100644 index 0000000..b258802 --- /dev/null +++ b/scripts/docker-entrypoint-wrapper.sh @@ -0,0 +1,12 @@ +#!/bin/bash +set -e + +# This wrapper script runs the initialization in the background after CouchDB starts +# The actual data loading happens via init-seed-data.sh + +# Start the initialization script in the background +# It will wait for CouchDB to be ready before loading data +/opt/couchdb/scripts/init-seed-data.sh & + +# Execute the original CouchDB entrypoint +exec /docker-entrypoint.sh "$@" diff --git a/scripts/init-seed-data.sh b/scripts/init-seed-data.sh new file mode 100644 index 0000000..f7e04ec --- /dev/null +++ b/scripts/init-seed-data.sh @@ -0,0 +1,139 @@ +#!/bin/bash +set -e + +# Configuration +COUCHDB_HOST="${COUCHDB_HOST:-localhost}" +COUCHDB_PORT="${COUCHDB_PORT:-5984}" +COUCHDB_USER="${COUCHDB_USER:-admin}" +COUCHDB_PASSWORD="${COUCHDB_PASSWORD:-admin}" +SEED_DIR="${SEED_DIR:-/opt/couchdb/seed}" +INIT_MARKER="/opt/couchdb/data/.seed_initialized" + +COUCHDB_URL="http://${COUCHDB_USER}:${COUCHDB_PASSWORD}@${COUCHDB_HOST}:${COUCHDB_PORT}" + +# Wait for CouchDB to be ready +wait_for_couchdb() { + echo "Waiting for CouchDB to be ready..." + local max_attempts=30 + local attempt=1 + + while [ $attempt -le $max_attempts ]; do + if curl -s "${COUCHDB_URL}/_up" | grep -q '"status":"ok"'; then + echo "CouchDB is ready!" + return 0 + fi + echo "Attempt $attempt/$max_attempts: CouchDB not ready yet..." 
+ sleep 2 + attempt=$((attempt + 1)) + done + + echo "ERROR: CouchDB failed to start within expected time" + return 1 +} + +# Create system databases required by CouchDB +create_system_databases() { + echo "Creating system databases..." + for db in _users _replicator _global_changes; do + curl -s -X PUT "${COUCHDB_URL}/${db}" > /dev/null 2>&1 || true + done +} + +# Create a database if it doesn't exist +create_database() { + local db_name="$1" + echo "Creating database: ${db_name}" + local response=$(curl -s -w "\n%{http_code}" -X PUT "${COUCHDB_URL}/${db_name}") + local http_code=$(echo "$response" | tail -n1) + + if [ "$http_code" = "201" ]; then + echo " Database '${db_name}' created successfully" + elif [ "$http_code" = "412" ]; then + echo " Database '${db_name}' already exists" + else + echo " Warning: Unexpected response creating '${db_name}': $http_code" + fi +} + +# Load a single JSON document into a database +load_document() { + local db_name="$1" + local json_file="$2" + + local doc_id=$(jq -r '._id // empty' "$json_file" 2>/dev/null) + + if [ -n "$doc_id" ]; then + # Document has an _id, use PUT + echo " Loading document '${doc_id}' into '${db_name}'..." + curl -s -X PUT "${COUCHDB_URL}/${db_name}/${doc_id}" \ + -H "Content-Type: application/json" \ + -d @"$json_file" > /dev/null + else + # No _id, use POST to auto-generate + echo " Loading document from '$(basename "$json_file")' into '${db_name}'..." + curl -s -X POST "${COUCHDB_URL}/${db_name}" \ + -H "Content-Type: application/json" \ + -d @"$json_file" > /dev/null + fi +} + +# Load all seed data +load_seed_data() { + echo "Loading seed data from ${SEED_DIR}..." + + # Check if seed directory exists and has files + if [ ! 
-d "$SEED_DIR" ]; then + echo "Seed directory not found: ${SEED_DIR}" + return 0 + fi + + # Load database-specific directories + # Structure: seed//*.json + for db_dir in "$SEED_DIR"/*/; do + if [ -d "$db_dir" ]; then + local db_name=$(basename "$db_dir") + echo "Processing database: ${db_name}" + create_database "$db_name" + + # Load all JSON files in the database directory + for json_file in "$db_dir"/*.json; do + if [ -f "$json_file" ]; then + load_document "$db_name" "$json_file" + fi + done + fi + done + + # Also load any top-level JSON files into a 'statusdb' database (legacy support) + local has_toplevel_json=false + for json_file in "$SEED_DIR"/*.json; do + if [ -f "$json_file" ]; then + if [ "$has_toplevel_json" = false ]; then + create_database "statusdb" + has_toplevel_json=true + fi + load_document "statusdb" "$json_file" + fi + done + + echo "Seed data loading complete!" +} + +# Main execution +main() { + # Check if we've already initialized + if [ -f "$INIT_MARKER" ]; then + echo "Seed data already initialized, skipping..." + exit 0 + fi + + wait_for_couchdb + create_system_databases + load_seed_data + + # Create marker file to indicate initialization is complete + touch "$INIT_MARKER" + echo "Initialization complete!" 
+} + +main "$@" diff --git a/seed/agreements/_design_project.json b/seed/agreements/_design_project.json new file mode 100644 index 0000000..e674061 --- /dev/null +++ b/seed/agreements/_design_project.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/project", + "views": { + "project_id": { + "map": "function(doc) { if (doc.project_id) { emit(doc.project_id, doc); } }" + } + } +} diff --git a/seed/gs_configs/genstat_defaults.json b/seed/gs_configs/genstat_defaults.json new file mode 100644 index 0000000..6439321 --- /dev/null +++ b/seed/gs_configs/genstat_defaults.json @@ -0,0 +1,66 @@ +{ + "_id": "genstat_defaults", + "roles": { + "admin": "Administrator", + "pricing_admin": "Pricing Administrator", + "sample_requirements_admin": "Sample Requirements Admin", + "proj_coord": "Project Coordinator", + "api_user": "API User" + }, + "pv_columns": { + "BASIC_COLUMNS": { + "Project ID": "project_id", + "Project Name": "project_name", + "Application": "application", + "Status": "status" + }, + "SETUP_PROJECT_COLUMNS": { + "Open Date": "open_date", + "Queue Date": "queued", + "Close Date": "close_date" + }, + "BIOINFO_COLUMNS": { + "Bioinfo Responsible": "bioinfo_responsible" + }, + "SEQUENCING_COLUMNS": { + "Sequencing Platform": "sequencing_platform" + }, + "ACCREDITATION_COLUMNS": { + "Accredited": "accredited" + }, + "DETAILS_COLUMNS": { + "Project Type": "type", + "Sample Type": "sample_type" + }, + "LIBRARY_PREP_COLUMNS": { + "Library Prep": "library_construction_method" + }, + "EXTRA_COLUMNS": {} + }, + "sample_columns": { + "BASIC_COLUMNS": { + "Sample ID": "sample_id", + "Sample Name": "sample_name" + }, + "DETAILS_COLUMNS": { + "Organism": "organism", + "Tissue": "tissue" + } + }, + "project_summary_presets": {"default": {}}, + "flowcell_presets": {"default": {}}, + "workset_presets": {"default": {}}, + "pv_presets": { + "All Projects": { + "ICON": {"glyphicon": "fa-list"}, + "BASIC_COLUMNS": {"project_id": true, "project_name": true, "application": true, "status": 
true}, + "SETUP_PROJECT_COLUMNS": {"open_date": true}, + "STATUS": ["ongoing", "reception control", "pending"] + }, + "Ongoing": { + "ICON": {"glyphicon": "fa-spinner"}, + "BASIC_COLUMNS": {"project_id": true, "project_name": true, "application": true, "status": true}, + "STATUS": ["ongoing"] + } + } +} diff --git a/seed/gs_configs/gs_teams.json b/seed/gs_configs/gs_teams.json new file mode 100644 index 0000000..005aa5d --- /dev/null +++ b/seed/gs_configs/gs_teams.json @@ -0,0 +1,10 @@ +{ + "_id": "gs_teams", + "teams": { + "library_prep": "Library Preparation", + "sequencing": "Sequencing", + "bioinformatics": "Bioinformatics", + "project_coordination": "Project Coordination", + "applications": "Applications" + } +} diff --git a/seed/gs_users/_design_authorized.json b/seed/gs_users/_design_authorized.json new file mode 100644 index 0000000..1b25cc0 --- /dev/null +++ b/seed/gs_users/_design_authorized.json @@ -0,0 +1,11 @@ +{ + "_id": "_design/authorized", + "views": { + "users": { + "map": "function(doc) { if (doc.email) { emit(doc.email, doc); } }" + }, + "info": { + "map": "function(doc) { if (doc.email) { emit(doc.email, {name: doc.name, initials: doc.initials, roles: doc.roles, teams: doc.teams}); } }" + } + } +} diff --git a/seed/gs_users/test_admin.json b/seed/gs_users/test_admin.json new file mode 100644 index 0000000..8c7c046 --- /dev/null +++ b/seed/gs_users/test_admin.json @@ -0,0 +1,8 @@ +{ + "_id": "test@example.com", + "email": "test@example.com", + "name": "Test Admin", + "initials": "TA", + "roles": ["admin"], + "teams": ["bioinformatics"] +} diff --git a/seed/people_assignments/_design_current.json b/seed/people_assignments/_design_current.json new file mode 100644 index 0000000..14da720 --- /dev/null +++ b/seed/people_assignments/_design_current.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/current", + "views": { + "assignments": { + "map": "function(doc) { if (doc.project_id && doc.assigned_to) { emit(doc.project_id, doc.assigned_to); } }" + } + } +} 
diff --git a/seed/projects/P12345.json b/seed/projects/P12345.json new file mode 100644 index 0000000..3bda2f0 --- /dev/null +++ b/seed/projects/P12345.json @@ -0,0 +1,17 @@ +{ + "_id": "P12345", + "project_id": "P12345", + "project_name": "Test RNA-seq Project", + "application": "RNA-seq", + "status": "ongoing", + "open_date": "2025-11-15", + "queued": "2025-11-20", + "details": { + "type": "Production", + "sample_type": "Total RNA", + "queued": "2025-11-20" + }, + "sequencing_platform": "NovaSeq 6000", + "library_construction_method": "TruSeq Stranded mRNA", + "summary_dates": {} +} diff --git a/seed/projects/P12346.json b/seed/projects/P12346.json new file mode 100644 index 0000000..b47379f --- /dev/null +++ b/seed/projects/P12346.json @@ -0,0 +1,15 @@ +{ + "_id": "P12346", + "project_id": "P12346", + "project_name": "Whole Genome Sequencing Study", + "application": "WGS", + "status": "reception control", + "open_date": "2025-12-01", + "details": { + "type": "Production", + "sample_type": "Genomic DNA" + }, + "sequencing_platform": "NovaSeq X", + "library_construction_method": "PCR-free", + "summary_dates": {} +} diff --git a/seed/projects/P12347.json b/seed/projects/P12347.json new file mode 100644 index 0000000..38b6163 --- /dev/null +++ b/seed/projects/P12347.json @@ -0,0 +1,18 @@ +{ + "_id": "P12347", + "project_id": "P12347", + "project_name": "Single Cell Analysis", + "application": "10X Genomics", + "status": "closed", + "open_date": "2025-06-01", + "queued": "2025-06-05", + "close_date": "2025-11-10", + "details": { + "type": "Production", + "sample_type": "Single cells", + "queued": "2025-06-05" + }, + "sequencing_platform": "NovaSeq 6000", + "library_construction_method": "10X Chromium", + "summary_dates": {} +} diff --git a/seed/projects/_design_project.json b/seed/projects/_design_project.json new file mode 100644 index 0000000..67a7ad6 --- /dev/null +++ b/seed/projects/_design_project.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/project", + "views": { 
+ "summary_status": { + "map": "function(doc) { if (doc.status) { emit([doc.status, doc.project_id], doc); } }" + } + } +} diff --git a/seed/running_notes/_design_latest_note_previews.json b/seed/running_notes/_design_latest_note_previews.json new file mode 100644 index 0000000..2b140e7 --- /dev/null +++ b/seed/running_notes/_design_latest_note_previews.json @@ -0,0 +1,9 @@ +{ + "_id": "_design/latest_note_previews", + "views": { + "project": { + "map": "function(doc) { if (doc.project_id && doc.created_at_utc) { emit(doc.project_id, {created_at_utc: doc.created_at_utc, note: doc.note}); } }", + "reduce": "_stats" + } + } +} diff --git a/seed/sensorpush/_design_entire_document.json b/seed/sensorpush/_design_entire_document.json new file mode 100644 index 0000000..b3c51da --- /dev/null +++ b/seed/sensorpush/_design_entire_document.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/entire_document", + "views": { + "by_sensor_id_and_date": { + "map": "function(doc) { if (doc.sensor_id && doc.date) { emit([doc.sensor_id, doc.date], doc); } }" + } + } +} diff --git a/seed/sensorpush/_design_sensor_id.json b/seed/sensorpush/_design_sensor_id.json new file mode 100644 index 0000000..fef6c07 --- /dev/null +++ b/seed/sensorpush/_design_sensor_id.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/sensor_id", + "views": { + "by_date": { + "map": "function(doc) { if (doc.date && doc.sensor_id) { emit(doc.date, doc.sensor_id); } }" + } + } +} diff --git a/seed/server_status/_design_nases.json b/seed/server_status/_design_nases.json new file mode 100644 index 0000000..4a07e0c --- /dev/null +++ b/seed/server_status/_design_nases.json @@ -0,0 +1,8 @@ +{ + "_id": "_design/nases", + "views": { + "by_timestamp": { + "map": "function(doc) { if (doc.timestamp) { emit(doc.timestamp, doc); } }" + } + } +} diff --git a/seed/server_status/example_status.json b/seed/server_status/example_status.json new file mode 100644 index 0000000..7be86ed --- /dev/null +++ b/seed/server_status/example_status.json @@ -0,0 
+1,8 @@ +{ + "_id": "status_001", + "timestamp": "2025-12-05T09:00:00Z", + "name": "dev-nas-01", + "status": "ok", + "disk_usage": 50, + "disk_total": 100 +}