Skip to content

Commit

Permalink
Merge branch 'master' into remove-comment
Browse files Browse the repository at this point in the history
  • Loading branch information
alxndrsn committed Dec 10, 2024
2 parents 9e96df4 + 7bb0360 commit e4c780e
Show file tree
Hide file tree
Showing 21 changed files with 475 additions and 188 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ version: 2.1
jobs:
build:
docker:
- image: cimg/node:20.17.0
- image: cimg/node:22.12.0
- image: cimg/postgres:14.10
environment:
POSTGRES_PASSWORD: odktest
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/oidc-e2e.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
name: OIDC e2e tests

on: push
on:
push:
pull_request:

env:
DEBUG: pw:api
Expand All @@ -27,10 +29,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.17.0
node-version: 22.12.0
cache: 'npm'
- run: make test-oidc-e2e
- name: Archive playwright screenshots
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/oidc-integration.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
name: OIDC integration tests

on: push
on:
push:
pull_request:

jobs:
oidc-integration-test:
Expand All @@ -23,10 +25,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.17.0
node-version: 22.12.0
cache: 'npm'
- run: npm ci
- run: FAKE_OIDC_ROOT_URL=http://localhost:9898 make fake-oidc-server-ci > fake-oidc-server.log &
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/s3-e2e.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
name: S3 E2E Tests

on: push
on:
push:
pull_request:

jobs:
s3-e2e:
Expand Down Expand Up @@ -41,10 +43,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.17.0
node-version: 22.12.0
cache: 'npm'
- run: npm ci
- run: node lib/bin/create-docker-databases.js
Expand Down
4 changes: 2 additions & 2 deletions .github/workflows/soak-test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -25,10 +25,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.17.0
node-version: 22.12.0
cache: 'npm'
- run: npm ci
- run: node lib/bin/create-docker-databases.js
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/standard-e2e.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
name: Standard E2E Tests

on: push
on:
push:
pull_request:

env:
LOG_LEVEL: DEBUG
Expand All @@ -26,10 +28,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.10.0
node-version: 22.12.0
cache: 'npm'
- run: npm ci
- run: node lib/bin/create-docker-databases.js
Expand Down
8 changes: 5 additions & 3 deletions .github/workflows/standard-suite.yml
Original file line number Diff line number Diff line change
@@ -1,6 +1,8 @@
name: Full Standard Test Suite

on: push
on:
push:
pull_request:

jobs:
standard-tests:
Expand All @@ -23,10 +25,10 @@ jobs:
--health-retries 5
steps:
- uses: actions/checkout@v4
- name: Use Node.js 20
- name: Set node version
uses: actions/setup-node@v4
with:
node-version: 20.17.0
node-version: 22.12.0
cache: 'npm'
- run: npm ci
- run: node lib/bin/create-docker-databases.js
Expand Down
10 changes: 5 additions & 5 deletions Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -101,27 +101,27 @@ debug: base

.PHONY: test
test: lint
BCRYPT=insecure npx mocha --recursive
BCRYPT=insecure npx --node-options="--no-deprecation" mocha --recursive

.PHONY: test-ci
test-ci: lint
BCRYPT=insecure npx mocha --recursive --reporter test/ci-mocha-reporter.js
BCRYPT=insecure npx --node-options="--no-deprecation" mocha --recursive --reporter test/ci-mocha-reporter.js

.PHONY: test-fast
test-fast: node_version
BCRYPT=insecure npx mocha --recursive --fgrep @slow --invert
BCRYPT=insecure npx --node-options="--no-deprecation" mocha --recursive --fgrep @slow --invert

.PHONY: test-integration
test-integration: node_version
BCRYPT=insecure npx mocha --recursive test/integration
BCRYPT=insecure npx --node-options="--no-deprecation" mocha --recursive test/integration

.PHONY: test-unit
test-unit: node_version
BCRYPT=insecure npx mocha --recursive test/unit

.PHONY: test-coverage
test-coverage: node_version
npx nyc -x "**/migrations/**" --reporter=lcov node_modules/.bin/_mocha --recursive test
npx --node-options="--no-deprecation" nyc -x "**/migrations/**" --reporter=lcov node_modules/.bin/_mocha --recursive test

.PHONY: lint
lint: node_version
Expand Down
3 changes: 3 additions & 0 deletions docs/api.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -45,6 +45,7 @@ info:
**Changed**:

- [Submissions Odata]() now returns `__system/deletedAt`. It can also be used in $filter, $sort and $select query parameters.
- Dataset (Entity List) properties with the same name but different capitalization are not allowed.

## ODK Central v2024.2

Expand Down Expand Up @@ -8707,6 +8708,8 @@ paths:
description: |-
Creates a new Property with a specified name in the Dataset.

The name of a Property is case-sensitive in that it will keep the capitalization provided (e.g. "Firstname"). But Central will not allow another Property with the same name but different capitalization to be created (e.g. "FIRSTNAME" when "Firstname" already exists).

Property names follow the same rules as form field names (valid XML identifiers) and cannot use the reserved names of `name` or `label`, or begin with the reserved prefix `__`.
operationId: Adding Properties
parameters:
Expand Down
18 changes: 11 additions & 7 deletions lib/http/service.js
Original file line number Diff line number Diff line change
Expand Up @@ -122,14 +122,18 @@ module.exports = (container) => {
service.use((error, request, response, next) => {
if (response.headersSent === true) {
// In this case, we'll just let Express fail the request out.
next(error);
} else if (error?.type === 'entity.parse.failed') {
// catch body-parser middleware problems. we only ask it to parse JSON, which
// isn't part of OpenRosa, so we can assume a plain JSON response.
defaultErrorWriter(Problem.user.unparseable({ format: 'json', rawLength: error.body.length }), request, response);
} else {
defaultErrorWriter(error, request, response);
return next(error);
}

// catch body-parser middleware problems. we only ask it to parse JSON, which
// isn't part of OpenRosa, so we can assume a plain JSON response.
switch (error?.type) {
case 'encoding.unsupported': return defaultErrorWriter(Problem.user.encodingNotSupported(), request, response);
case 'entity.parse.failed': return defaultErrorWriter(Problem.user.unparseable({ format: 'json', rawLength: error.body.length }), request, response);
case 'entity.too.large': return defaultErrorWriter(Problem.user.requestTooLarge(), request, response);
default: return defaultErrorWriter(error, request, response);
}

});

return service;
Expand Down
63 changes: 53 additions & 10 deletions lib/model/query/datasets.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ const { sql } = require('slonik');
const { extender, insert, QueryOptions, sqlEquals, unjoiner, updater } = require('../../util/db');
const { Dataset, Form, Audit } = require('../frames');
const { validatePropertyName } = require('../../data/dataset');
const { difference, isEmpty, isNil, either, reduceBy, groupBy, uniqWith, equals, map } = require('ramda');
const { difference, isEmpty, isNil, either, reduceBy, groupBy, uniqWith, equals, map, pipe, values, any, prop, toLower } = require('ramda');

const Problem = require('../../util/problem');
const { construct } = require('../../util/util');
Expand Down Expand Up @@ -110,21 +110,22 @@ const createOrMerge = (parsedDataset, form, fields) => async ({ context, one, Ac
// Prepare dataset properties from form fields:
// Step 1: Filter to only fields with property name binds
let dsPropertyFields = fields.filter((field) => (field.propertyName));
// Step 2: Check for invalid property names
// Step 2a: Check for invalid property names
if (dsPropertyFields.filter((field) => !validatePropertyName(field.propertyName)).length > 0)
throw Problem.user.invalidEntityForm({ reason: 'Invalid entity property name.' });

// Step 2b: Multiple fields trying to save to a single property (case-insensitive)
const hasDuplicateProperties = pipe(groupBy(pipe(prop('propertyName'), toLower)), values, any(g => g.length > 1));
if (hasDuplicateProperties(dsPropertyFields)) {
throw Problem.user.invalidEntityForm({ reason: 'Multiple Form Fields cannot be saved to a single property.' });
}

// Step 3: Build Form Field frames to handle dataset property field insertion
dsPropertyFields = dsPropertyFields.map((field) => new Form.Field(field, { propertyName: field.propertyName, schemaId, formDefId }));

// Insert or update: update dataset, dataset properties, and links to fields and current form
// result contains action (created or updated) and the id of the dataset.
const result = await one(_createOrMerge(partial, acteeId, parsedDataset.actions, dsPropertyFields))
.catch(error => {
if (error.constraint === 'ds_property_fields_dspropertyid_formdefid_unique') {
throw Problem.user.invalidEntityForm({ reason: 'Multiple Form Fields cannot be saved to a single property.' });
}
throw error;
});
const result = await one(_createOrMerge(partial, acteeId, parsedDataset.actions, dsPropertyFields));

// Verify that user has ability to create dataset
// Updating an unpublished dataset is equivalent to creating the dataset
Expand All @@ -139,6 +140,18 @@ const createOrMerge = (parsedDataset, form, fields) => async ({ context, one, Ac
const newNames = dsPropertyFields.map(f => f.propertyName);
if (difference(newNames, existingNames).length > 0)
await context.auth.canOrReject('dataset.update', { acteeId });

// Check properties with same name but different capitalization
const duplicateProperties = newNames
.filter(newName => !existingNames.includes(newName))
.map(newName => ({
current: existingNames.find(existingName => newName.toLowerCase() === existingName.toLowerCase()),
provided: newName
}))
.filter(property => property.current);
if (duplicateProperties.length > 0) {
throw Problem.user.propertyNameConflict({ duplicateProperties });
}
}

// Partial contains acteeId which is needed for auditing.
Expand Down Expand Up @@ -184,8 +197,21 @@ DO UPDATE SET "publishedAt" = clock_timestamp()
WHERE ds_properties."publishedAt" IS NULL
RETURNING *`;

const _getDuplicatePropertyName = (property) => sql`
SELECT name FROM ds_properties
WHERE lower(name) = lower(${property.name})
AND "datasetId" = ${property.datasetId}
AND "publishedAt" IS NOT NULL
`;

// eslint-disable-next-line no-unused-vars
const createPublishedProperty = (property, dataset) => async ({ all }) => {
const createPublishedProperty = (property, dataset) => async ({ all, maybeOne }) => {

const duplicateProperty = await maybeOne(_getDuplicatePropertyName(property));
if (duplicateProperty.isDefined()) {
throw Problem.user.uniquenessViolation({ table: 'ds_properties', fields: [ 'name', 'datasetId' ], values: [ duplicateProperty.get().name, dataset.id ] });
}

const result = await all(_createPublishedProperty(property));
if (result.length === 0)
throw Problem.user.uniquenessViolation({ table: 'ds_properties', fields: [ 'name', 'datasetId' ], values: [ property.name, dataset.id ] });
Expand Down Expand Up @@ -255,12 +281,29 @@ const publishIfExists = (formDefId, publishedAt) => async ({ all, context, maybe
AND ds_to_publish."publishedAt" IS NULL
`;

const _propertyNameConflict = () => sql`
SELECT dp.name as provided, existing_properties.name as current
FROM ds_property_fields dpf
JOIN ds_properties dp ON dp.id = dpf."dsPropertyId"
JOIN datasets ds ON ds.id = dp."datasetId"
JOIN ds_properties existing_properties ON existing_properties."datasetId" = ds.id
AND LOWER(dp.name) = LOWER(existing_properties.name)
WHERE dpf."formDefId" = ${formDefId}
AND existing_properties."publishedAt" IS NOT NULL
AND dp."publishedAt" IS NULL
`;

const d = await maybeOne(_datasetNameConflict());
if (d.isDefined()) {
const { current, provided } = d.get();
throw Problem.user.datasetNameConflict({ current, provided });
}

const duplicateProperties = await all(_propertyNameConflict());
if (duplicateProperties.length > 0) {
throw Problem.user.propertyNameConflict({ duplicateProperties });
}

const properties = await all(_publishIfExists());
// `properties` contains a list of objects with the following structure:
// property id, property name, dataset id, dataset actee id, and action
Expand Down
4 changes: 2 additions & 2 deletions lib/resources/odata.js
Original file line number Diff line number Diff line change
Expand Up @@ -55,8 +55,8 @@ module.exports = (service, endpoint) => {
});

// TODO: because of the way express compiles the *, we have to register this twice.
service.get(`${base}/Submissions\\((:uuid)\\)`, singleRecord);
service.get(`${base}/Submissions\\((:uuid)\\)/*`, singleRecord);
service.get(`${base}/Submissions\\(:uuid\\)`, singleRecord);
service.get(`${base}/Submissions\\(:uuid\\)/*`, singleRecord);

// serves table data.
service.get(`${base}/:table`, endpoint.odata.json(({ Forms, Submissions, env }, { auth, params, originalUrl, query }) =>
Expand Down
7 changes: 6 additions & 1 deletion lib/util/problem.js
Original file line number Diff line number Diff line change
Expand Up @@ -136,6 +136,10 @@ const problems = {

odataInvalidSkiptoken: problem(400.35, () => 'Invalid $skiptoken'),

requestTooLarge: problem(400.36, () => 'Request body too large.'),

encodingNotSupported: problem(400.37, () => 'Encoding not supported.'),

// no detail information for security reasons.
authenticationFailed: problem(401.2, () => 'Could not authenticate with the provided credentials.'),

Expand Down Expand Up @@ -225,8 +229,9 @@ const problems = {

datasetNameConflict: problem(409.16, ({ current, provided }) => `A dataset named '${current}' exists and you provided '${provided}' with the same name but different capitalization.`),

duplicateSubmission: problem(409.18, () => `This submission has been deleted. You may not resubmit it.`)
propertyNameConflict: problem(409.17, () => 'This form attempts to create new Entity properties that match with existing ones except for capitalization.'),

duplicateSubmission: problem(409.18, () => `This submission has been deleted. You may not resubmit it.`)
},
internal: {
// no detail information, as this is only called when we don't know what happened.
Expand Down
Loading

0 comments on commit e4c780e

Please sign in to comment.