
Merge branch 'master' into 2.42-DHIS2-18368_3
d-bernat authored Dec 23, 2024
2 parents 30ab376 + 6cb0d1f commit a6cce87
Showing 29 changed files with 502 additions and 147 deletions.
12 changes: 0 additions & 12 deletions .github/dependabot.yml
@@ -52,12 +52,6 @@ updates:
- dependency-name: "org.springframework.ldap:*" # Spring ldap 3.x requires Spring 6 (see above)
versions:
- ">= 3.0"
- dependency-name: "org.hisp.dhis.parser:*" # Antlr parser must be upgraded manually due to circular dependency with rule engine
versions:
- ">= 1.0"
- dependency-name: "org.hisp.dhis.rules:*" # Rule engine must be upgraded manually due to circular dependency with ANTLR parser
versions:
- ">= 2.0"
- dependency-name: "org.slf4j:slf4j-api" # will update in https://dhis2.atlassian.net/browse/DHIS2-16504
versions:
- ">= 2.0"
@@ -138,12 +132,6 @@ updates:
- dependency-name: "org.springframework.ldap:*" # Spring ldap 3.x requires Spring 6 (see above)
versions:
- ">= 3.0"
- dependency-name: "org.hisp.dhis.parser:*" # Antlr parser must be upgraded manually due to circular dependency with rule engine
versions:
- ">= 1.0"
- dependency-name: "org.hisp.dhis.rules:*" # Rule engine must be upgraded manually due to circular dependency with ANTLR parser
versions:
- ">= 2.0"
- dependency-name: "org.flywaydb:flyway-core" # It requires Postgres version to be >= 11
versions:
- "> 9.22.3"
51 changes: 51 additions & 0 deletions .github/workflows/generate-merge-boms.yml
@@ -0,0 +1,51 @@
name: Generate and Merge SBOMs and Upload them to DependencyTrack every night

on:
schedule:
- cron: "0 0 * * *" # Run every day at midnight

concurrency:
group: ${{ github.workflow}}-${{ github.ref }}
cancel-in-progress: true

jobs:
create-boms:
runs-on: ubuntu-latest
defaults:
run:
working-directory: dhis-2/

steps:
- uses: actions/checkout@v4
with:
ref: ${{ github.event.pull_request.head.sha }}
fetch-depth: 0
- name: Set up JDK 17
uses: actions/setup-java@v4
with:
java-version: 17
distribution: temurin
cache: maven

- name: Install CycloneDX CLI
run: |
curl -s https://api.github.com/repos/CycloneDX/cyclonedx-cli/releases/latest | grep "browser_download_url.*linux.x64" | cut -d '"' -f 4 | wget -i -
sudo mv cyclonedx-linux-x64 /usr/local/bin/
sudo chmod +x /usr/local/bin/cyclonedx-linux-x64
- name: Generate BOMs
run: mvn cyclonedx:makeBom

- name: Merge BOMs
run: cyclonedx-linux-x64 merge --input-files $(find . -name 'dxbom.json') --input-format json --output-file target/merged-bom.json --output-format json

- name: Upload SBOM to DependencyTrack
env:
DEPENDENCY_TRACK_API: "https://dt.security.dhis2.org/api/v1/bom"
run: |
curl -X POST "$DEPENDENCY_TRACK_API" \
--fail-with-body \
-H "Content-Type: multipart/form-data" \
-H "X-Api-Key: ${{ secrets.DEPENDENCYTRACK_APIKEY }}" \
-F "project=56383704-d5a2-4a35-ad6a-081f80f5d6d3" \
-F "bom=@target/merged-bom.json"
@@ -121,6 +121,15 @@
@JacksonXmlRootElement(localName = "analyticalObject", namespace = DxfNamespaces.DXF_2_0)
public abstract class BaseAnalyticalObject extends BaseNameableObject implements AnalyticalObject {

private static final BaseDimensionalItemObject USER_OU_ITEM_OBJ =
buildDimItemObj(KEY_USER_ORGUNIT, "User organisation unit");

private static final BaseDimensionalItemObject USER_OU_CHILDREN_ITEM_OBJ =
buildDimItemObj(KEY_USER_ORGUNIT_CHILDREN, "User organisation unit children");

private static final BaseDimensionalItemObject USER_OU_GRANDCHILDREN_ITEM_OBJ =
buildDimItemObj(KEY_USER_ORGUNIT_GRANDCHILDREN, "User organisation unit grand children");

public static final String NOT_A_VALID_DIMENSION = "Not a valid dimension: %s";

/** Line and axis labels. */
@@ -320,6 +329,19 @@ public abstract void init(
List<OrganisationUnit> organisationUnitsInGroups,
I18nFormat format);

/**
* Builds a dimensional item object for the given dimension uid and name.
*
* @param uid the dimension uid.
* @param name the dimension name.
* @return the {@link BaseDimensionalItemObject}.
*/
private static BaseDimensionalItemObject buildDimItemObj(String uid, String name) {
BaseDimensionalItemObject itemObj = new BaseDimensionalItemObject(uid);
itemObj.setName(name);
return itemObj;
}

@Override
public abstract void populateAnalyticalProperties();

@@ -700,15 +722,15 @@ protected Optional<DimensionalObject> getDimensionalObject(String dimension) {
ouList.addAll(transientOrganisationUnits);

if (userOrganisationUnit) {
ouList.add(new BaseDimensionalItemObject(KEY_USER_ORGUNIT));
ouList.add(USER_OU_ITEM_OBJ);
}

if (userOrganisationUnitChildren) {
ouList.add(new BaseDimensionalItemObject(KEY_USER_ORGUNIT_CHILDREN));
ouList.add(USER_OU_CHILDREN_ITEM_OBJ);
}

if (userOrganisationUnitGrandChildren) {
ouList.add(new BaseDimensionalItemObject(KEY_USER_ORGUNIT_GRANDCHILDREN));
ouList.add(USER_OU_GRANDCHILDREN_ITEM_OBJ);
}

if (organisationUnitLevels != null && !organisationUnitLevels.isEmpty()) {
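
The buildDimItemObj helper above centralizes construction of the three user-org-unit items, which are now built once as static constants and reused, where each call previously allocated a fresh, unnamed BaseDimensionalItemObject. A minimal, self-contained sketch of that build-once pattern; the class name, field names and uid literal below are illustrative stand-ins, not the DHIS2 types:

public class StaticDimItemSketch {

  // Stand-in for BaseDimensionalItemObject: a uid plus a human-readable name.
  static class DimItem {
    final String uid;
    String name;

    DimItem(String uid) {
      this.uid = uid;
    }
  }

  // Built once and shared, mirroring the static USER_OU_* constants above, so callers
  // no longer allocate a new, nameless item object on every request.
  // The uid literal is a placeholder standing in for KEY_USER_ORGUNIT.
  static final DimItem USER_OU = buildDimItemObj("USER_ORGUNIT", "User organisation unit");

  static DimItem buildDimItemObj(String uid, String name) {
    DimItem item = new DimItem(uid);
    item.name = name;
    return item;
  }

  public static void main(String[] args) {
    System.out.println(USER_OU.uid + " -> " + USER_OU.name); // USER_ORGUNIT -> User organisation unit
  }
}
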
@@ -27,6 +27,8 @@
*/
package org.hisp.dhis.common;

import static org.hisp.dhis.analytics.Aggregation.AGGREGATED;

import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
@@ -40,6 +42,7 @@
import java.util.stream.Collectors;
import lombok.AllArgsConstructor;
import lombok.NoArgsConstructor;
import org.hisp.dhis.analytics.Aggregation;
import org.hisp.dhis.dataelement.DataElement;
import org.hisp.dhis.dataelement.DataElementOperand;
import org.hisp.dhis.expressiondimensionitem.ExpressionDimensionItem;
@@ -113,16 +116,31 @@ public class DataDimensionItem {
@AllArgsConstructor
public static class Attributes implements Serializable {
/** The option item for this dimension item. * */
private OptionSetItem optionItem;
private OptionSetItem optionSetItem;

@JsonProperty
@JacksonXmlProperty(namespace = DxfNamespaces.DXF_2_0)
public OptionSetItem getOptionSetItem() {
return optionItem;
return optionSetItem;
}

/**
* This method ensures that existing persisted items return default values in case the current
* {@link OptionSetItem} is null or does not have an {@link Aggregation} defined.
*
* @return the correct version of an {@link OptionSetItem}.
*/
public OptionSetItem getOptionSetItemOrDefault() {
if (optionSetItem != null) {
return new OptionSetItem(
optionSetItem.getOptions(), optionSetItem.getAggregationOrDefault());
}

return new OptionSetItem(Set.of(), AGGREGATED);
}

public void setOptionSetItem(OptionSetItem optionItem) {
this.optionItem = optionItem;
public void setOptionSetItem(OptionSetItem optionSetItem) {
this.optionSetItem = optionSetItem;
}
}

@@ -232,14 +250,17 @@ public DimensionalItemObject getDimensionalItemObject() {
}

/**
* Simply loads the internal attributes into the given item object.
* Simply loads the internal attributes into the given item object. Attributes that are null
* will be loaded with their respective defaults.
*
* @param itemObject the {@link BaseDimensionalItemObject}.
*/
private void loadAttributes(BaseDimensionalItemObject itemObject) {
if (attributes != null) {
itemObject.setOptionSetItem(attributes.getOptionSetItem());
if (attributes == null) {
attributes = new Attributes();
}

itemObject.setOptionSetItem(attributes.getOptionSetItemOrDefault());
}

@JsonProperty
@@ -27,6 +27,7 @@
*/
package org.hisp.dhis.common;

import static org.hisp.dhis.analytics.Aggregation.AGGREGATED;
import static org.hisp.dhis.common.DxfNamespaces.DXF_2_0;

import com.fasterxml.jackson.annotation.JsonProperty;
@@ -71,4 +72,17 @@ public Aggregation getAggregation() {
public void setAggregation(Aggregation aggregation) {
this.aggregation = aggregation;
}

/**
* Returns the current {@link Aggregation} or default.
*
* @return the respective {@link Aggregation} object.
*/
public Aggregation getAggregationOrDefault() {
if (aggregation == null) {
return AGGREGATED;
}

return aggregation;
}
}
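
The two *OrDefault accessors above follow the same fall-back rule: items persisted before the aggregation attribute existed deserialize with a null OptionSetItem or a null Aggregation, and the getters substitute AGGREGATED so callers never see null. A minimal, self-contained sketch of that default-fallback pattern; the Item and Agg types below are illustrative stand-ins, not the DHIS2 classes:

import java.util.Set;

public class DefaultFallbackSketch {

  // Stand-in for org.hisp.dhis.analytics.Aggregation.
  enum Agg { AGGREGATED, LAST }

  // Stand-in for OptionSetItem: a set of options plus an optional aggregation.
  record Item(Set<String> options, Agg aggregation) {
    // Mirrors getAggregationOrDefault(): never expose a null aggregation.
    Agg aggregationOrDefault() {
      return aggregation != null ? aggregation : Agg.AGGREGATED;
    }
  }

  // Mirrors getOptionSetItemOrDefault(): a null item (older persisted payloads)
  // becomes an empty item carrying the default aggregation.
  static Item orDefault(Item item) {
    if (item != null) {
      return new Item(item.options(), item.aggregationOrDefault());
    }
    return new Item(Set.of(), Agg.AGGREGATED);
  }

  public static void main(String[] args) {
    System.out.println(orDefault(null)); // Item[options=[], aggregation=AGGREGATED]
    System.out.println(orDefault(new Item(Set.of("opt"), null))); // aggregation falls back to AGGREGATED
  }
}
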
@@ -27,7 +27,6 @@
*/
package org.hisp.dhis.analytics.data;

import static java.util.stream.Collectors.toList;
import static org.apache.commons.collections4.CollectionUtils.addIgnoreNull;
import static org.apache.commons.collections4.CollectionUtils.isNotEmpty;
import static org.apache.commons.lang3.StringUtils.isNotEmpty;
@@ -67,6 +66,7 @@
import java.util.Optional;
import java.util.Set;
import java.util.stream.Stream;
import java.util.stream.Collectors;
import lombok.RequiredArgsConstructor;
import org.hisp.dhis.analytics.AnalyticsSecurityManager;
import org.hisp.dhis.analytics.DataQueryParams;
@@ -508,6 +508,6 @@ private List<DimensionalItemObject> getCategoryOptionComboList(
return items.stream()
.map(item -> idObjectManager.getObject(CategoryOptionCombo.class, inputIdScheme, item))
.filter(Objects::nonNull)
.collect(toList());
.collect(Collectors.toList());
}
}
@@ -212,7 +212,7 @@ void testConditionWithBooleanAsBoolean() {
sql,
is(
"case when (coalesce("
+ "case when ax.\"ps\" = 'ProgrmStagA' then \"DataElmentE\" else null end::numeric!=0,false)) "
+ "case when ax.\"ps\" = 'ProgrmStagA' then \"DataElmentE\" else null end::numeric != 0,false)) "
+ "then 10 + 5 else 3 * 2 end"));
}

@@ -279,7 +279,7 @@ private static String orderBy(List<Order> orders) {
orderJoiner.add(
order.getField() + " " + (order.getDirection().isAscending() ? "asc" : "desc"));
}
return " order by " + orderJoiner;
return " order by " + orderJoiner + ", " + DEFAULT_ORDER;
}

@Getter
@@ -57,23 +57,21 @@ public class HibernateEventChangeLogStore {
private static final String COLUMN_CHANGELOG_USER = "ecl.createdByUsername";
private static final String COLUMN_CHANGELOG_DATA_ELEMENT = "d.uid";
private static final String COLUMN_CHANGELOG_FIELD = "ecl.eventField";

private static final String ORDER_CHANGE_EXPRESSION =
"CONCAT(COALESCE(d.formName, ''), COALESCE(" + COLUMN_CHANGELOG_FIELD + ", ''))";
private static final String DEFAULT_ORDER =
COLUMN_CHANGELOG_CREATED + " " + SortDirection.DESC.getValue();

/**
* Event change logs can be ordered by given fields which correspond to fields on {@link
* EventChangeLog}. Maps fields to DB columns. The order implementation for change logs is
* different from other tracker exporters {@link EventChangeLog} is the view which is already
* returned from the service/store. Tracker exporter services return a representation we have to
* map to a view model. This mapping is not necessary for change logs.
* EventChangeLog}. Maps fields to DB columns, except when sorting by 'change'. In that case we
* need to sort by concatenation, to treat the dataElement and eventField as a single entity.
*/
private static final Map<String, String> ORDERABLE_FIELDS =
Map.ofEntries(
entry("createdAt", COLUMN_CHANGELOG_CREATED),
entry("username", COLUMN_CHANGELOG_USER),
entry("dataElement", COLUMN_CHANGELOG_DATA_ELEMENT),
entry("field", COLUMN_CHANGELOG_FIELD));
entry("change", ORDER_CHANGE_EXPRESSION));

private static final Map<Pair<String, Class<?>>, String> FILTERABLE_FIELDS =
Map.ofEntries(
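
As the updated comment notes, sorting by 'change' cannot map to a single column: the visible change combines the data element's form name with the event field, so the store sorts by a COALESCE-wrapped concatenation instead. A small, self-contained sketch of how such a map entry turns into an order-by expression; the SQL aliases mirror the constants above, while the surrounding class is illustrative:

import java.util.Map;

public class ChangeOrderSketch {

  // Logical sort field -> SQL expression, mirroring ORDERABLE_FIELDS above. "change" sorts
  // by a concatenation so formName and eventField behave as a single value; COALESCE keeps a
  // NULL on either side from nulling out the whole expression.
  static final Map<String, String> ORDERABLE_FIELDS =
      Map.of(
          "createdAt", "ecl.created",
          "username", "ecl.createdByUsername",
          "change", "CONCAT(COALESCE(d.formName, ''), COALESCE(ecl.eventField, ''))");

  static String orderBy(String field, String direction) {
    return " order by " + ORDERABLE_FIELDS.get(field) + " " + direction;
  }

  public static void main(String[] args) {
    System.out.println(orderBy("change", "asc"));
    // order by CONCAT(COALESCE(d.formName, ''), COALESCE(ecl.eventField, '')) asc
  }
}
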
@@ -1612,7 +1612,7 @@ private String getOrderQuery(EventQueryParams params) {
}

if (!orderFields.isEmpty()) {
return "order by " + StringUtils.join(orderFields, ',') + " ";
return "order by " + StringUtils.join(orderFields, ',') + ", " + DEFAULT_ORDER + " ";
} else {
return "order by " + DEFAULT_ORDER + " ";
}
@@ -41,6 +41,7 @@
import java.util.Set;
import java.util.function.Function;
import java.util.function.LongSupplier;
import java.util.stream.Stream;
import javax.annotation.Nonnull;
import org.apache.commons.collections4.CollectionUtils;
import org.hisp.dhis.common.IdentifiableObject;
@@ -264,10 +265,13 @@ private <T extends IdentifiableObject> String getRelationshipEntityType(T entity

private List<Order> orderBy(
RelationshipQueryParams queryParams, CriteriaBuilder builder, Root<Relationship> root) {
List<Order> defaultOrder = orderBy(List.of(DEFAULT_ORDER), builder, root);
if (!queryParams.getOrder().isEmpty()) {
return orderBy(queryParams.getOrder(), builder, root);
return Stream.concat(
orderBy(queryParams.getOrder(), builder, root).stream(), defaultOrder.stream())
.toList();
} else {
return orderBy(List.of(DEFAULT_ORDER), builder, root);
return defaultOrder;
}
}

@@ -1007,10 +1007,10 @@ private String getQueryOrderBy(TrackedEntityQueryParams params, boolean innerOrd
}

if (!orderFields.isEmpty()) {
return "ORDER BY " + StringUtils.join(orderFields, ',') + SPACE;
return "ORDER BY " + StringUtils.join(orderFields, ',') + ", " + DEFAULT_ORDER + SPACE;
}

return "ORDER BY " + DEFAULT_ORDER + " ";
return "ORDER BY " + DEFAULT_ORDER + SPACE;
}

/**
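
Several stores in this commit apply the same change to ordering: the caller's requested order is kept first and the store's DEFAULT_ORDER is appended as a final tiebreaker, so rows with equal sort keys keep a stable position across pages. A minimal, self-contained sketch of that pattern; the Order record and column name below are illustrative stand-ins, not the tracker types:

import java.util.List;
import java.util.stream.Stream;

public class DefaultOrderTiebreakerSketch {

  // Stand-in for an order specification: column plus direction.
  record Order(String column, String direction) {}

  static final Order DEFAULT_ORDER = new Order("created", "desc");

  // Requested orders come first; the default order is always appended so ties in the
  // requested columns still yield a deterministic row sequence for pagination.
  static List<Order> withDefault(List<Order> requested) {
    if (requested.isEmpty()) {
      return List.of(DEFAULT_ORDER);
    }
    return Stream.concat(requested.stream(), Stream.of(DEFAULT_ORDER)).toList();
  }

  public static void main(String[] args) {
    System.out.println(withDefault(List.of())); // [Order[column=created, direction=desc]]
    System.out.println(withDefault(List.of(new Order("field", "asc")))); // requested first, default last
  }
}
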
@@ -236,21 +236,22 @@ public String jsonExtract(String json, String key, String property) {
@Override
public String cast(String column, DataType dataType) {
return switch (dataType) {
case NUMERIC -> String.format("toDecimal64(%s, 8)", column); // 8 decimal places precision
case NUMERIC -> String.format("toFloat64(%s)", column);
case BOOLEAN ->
String.format("toUInt8(%s) != 0", column); // ClickHouse uses UInt8 for boolean
case TEXT -> String.format("toString(%s)", column);
};
}

@Override
public String age(String endDate, String startDate) {
throw new UnsupportedOperationException();
}

@Override
public String dateDifference(String startDate, String endDate, DateUnit dateUnit) {
throw new UnsupportedOperationException();
return switch (dateUnit) {
case DAYS -> String.format("dateDiff('day', %s, %s)", startDate, endDate);
case MINUTES -> String.format("dateDiff('minute', %s, %s)", startDate, endDate);
case MONTHS -> String.format("dateDiff('month', %s, %s)", startDate, endDate);
case YEARS -> String.format("dateDiff('year', %s, %s)", startDate, endDate);
case WEEKS -> String.format("dateDiff('week', %s, %s)", startDate, endDate);
};
}

@Override
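
The new dateDifference implementation above maps each DateUnit onto ClickHouse's dateDiff function instead of throwing UnsupportedOperationException. A self-contained sketch that mirrors that mapping and shows the SQL fragment it emits; the DateUnit stand-in and column names below are illustrative, not the actual DHIS2 types:

public class ClickHouseDateDiffSketch {

  // Illustrative stand-in for the DateUnit enum used by the SQL builder.
  enum DateUnit { DAYS, MINUTES, MONTHS, YEARS, WEEKS }

  // Mirrors the dateDifference() switch above: each unit maps onto ClickHouse's dateDiff().
  static String dateDifference(String startDate, String endDate, DateUnit dateUnit) {
    return switch (dateUnit) {
      case DAYS -> String.format("dateDiff('day', %s, %s)", startDate, endDate);
      case MINUTES -> String.format("dateDiff('minute', %s, %s)", startDate, endDate);
      case MONTHS -> String.format("dateDiff('month', %s, %s)", startDate, endDate);
      case YEARS -> String.format("dateDiff('year', %s, %s)", startDate, endDate);
      case WEEKS -> String.format("dateDiff('week', %s, %s)", startDate, endDate);
    };
  }

  public static void main(String[] args) {
    // Column names are placeholders; the output is the fragment the builder would embed in SQL.
    System.out.println(dateDifference("enrollmentdate", "occurreddate", DateUnit.DAYS));
    // dateDiff('day', enrollmentdate, occurreddate)
  }
}
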
@@ -241,12 +241,6 @@ public String cast(String column, DataType dataType) {
};
}

@Override
public String age(String endDate, String startDate) {
return String.format(
"TIMESTAMPDIFF(YEAR, cast(%s as date), cast(%s as date))", startDate, endDate);
}

@Override
public String dateDifference(String startDate, String endDate, DateUnit dateUnit) {
return switch (dateUnit) {
