
Commit

fix failing mvn tests
mozzy11 committed Jan 5, 2025
1 parent 84121c5 commit 73e0630
Showing 6 changed files with 12 additions and 7 deletions.
2 changes: 2 additions & 0 deletions cloudbuild.yaml
@@ -206,6 +206,7 @@ steps:
- FHIRDATA_GENERATEPARQUETFILES=true
- FHIRDATA_NUMTHREADS=-1
- FHIRDATA_CREATEHIVERESOURCETABLES=true
+ - FHIRDATA_CREATEPARQUETVIEWS=true
- FHIRDATA_SINKDBCONFIGPATH=config/hapi-postgres-config_local_views.json
args: [ '-f', './docker/compose-controller-spark-sql-single.yaml', 'up',
'--force-recreate', '-d' ]
@@ -248,6 +249,7 @@ steps:
- FHIRDATA_GENERATEPARQUETFILES=false
- FHIRDATA_NUMTHREADS=1
- FHIRDATA_CREATEHIVERESOURCETABLES=false
+ - FHIRDATA_CREATEPARQUETVIEWS=false
- FHIRDATA_SINKDBCONFIGPATH=
args: [ '-f', './docker/compose-controller-spark-sql-single.yaml', 'up',
'--force-recreate', '--no-deps' , '-d' ,'pipeline-controller' ]
1 change: 1 addition & 0 deletions docker/compose-controller-spark-sql-single.yaml
@@ -67,6 +67,7 @@ services:
- FHIRDATA_GENERATEPARQUETFILES=$FHIRDATA_GENERATEPARQUETFILES
- FHIRDATA_NUMTHREADS=$FHIRDATA_NUMTHREADS
- FHIRDATA_CREATEHIVERESOURCETABLES=$FHIRDATA_CREATEHIVERESOURCETABLES
+ - FHIRDATA_CREATEPARQUETVIEWS=$FHIRDATA_CREATEPARQUETVIEWS
- FHIRDATA_SINKDBCONFIGPATH=$FHIRDATA_SINKDBCONFIGPATH
ports:
- '8090:8080'
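Note: the new FHIRDATA_CREATEPARQUETVIEWS variable is set in cloudbuild.yaml for the two e2e configurations (views on in the first controller run, off in the second) and forwarded by the compose file into the pipeline-controller container, where it is resolved as a boolean flag. A purely illustrative sketch of that kind of environment-driven boolean default; this is not the controller's actual configuration binding, which lives elsewhere in the repo:

// Illustrative only: resolves an environment variable such as
// FHIRDATA_CREATEPARQUETVIEWS to a boolean, falling back to a default when unset.
public class EnvFlag {
  static boolean readFlag(String name, boolean defaultValue) {
    String raw = System.getenv(name);
    return (raw == null || raw.isEmpty()) ? defaultValue : Boolean.parseBoolean(raw);
  }

  public static void main(String[] args) {
    System.out.println(readFlag("FHIRDATA_CREATEPARQUETVIEWS", false));
  }
}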
2 changes: 1 addition & 1 deletion
@@ -1,5 +1,5 @@
/*
- * Copyright 2020-2024 Google LLC
+ * Copyright 2020-2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
5 changes: 3 additions & 2 deletions
@@ -1,5 +1,5 @@
/*
- * Copyright 2020-2024 Google LLC
+ * Copyright 2020-2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -21,6 +21,7 @@
import ca.uhn.fhir.parser.IParser;
import com.cerner.bunsen.exception.ProfileException;
import com.google.common.annotations.VisibleForTesting;
+ import com.google.common.base.Strings;
import com.google.fhir.analytics.JdbcConnectionPools.DataSourceConfig;
import com.google.fhir.analytics.model.DatabaseConfiguration;
import com.google.fhir.analytics.view.ViewApplicationException;
@@ -212,7 +213,7 @@ public void setup() throws SQLException, ProfileException {
oAuthClientSecret,
fhirContext);
fhirSearchUtil = new FhirSearchUtil(fetchUtil);
- if (generateParquetFiles) {
+ if (generateParquetFiles && !Strings.isNullOrEmpty(parquetFile)) {
parquetUtil =
new ParquetUtil(
fhirContext.getVersion().getVersion(),
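Note: with this change ParquetUtil is only constructed when Parquet generation is enabled and an output location is actually configured; Guava's Strings.isNullOrEmpty treats both null and the empty string as "not set", which is presumably what the previously failing test setups exercised. A small, self-contained sketch of the guard pattern; the class and method names below are placeholders, not the pipeline's actual members:

import com.google.common.base.Strings;

// Sketch of the guard added above; "parquetFile" mirrors the option name,
// but this class is illustrative only.
class ParquetSinkGuard {
  static boolean shouldCreateParquetSink(boolean generateParquetFiles, String parquetFile) {
    // Strings.isNullOrEmpty(s) is true for both null and "", so runs that leave the
    // Parquet output path unset simply skip Parquet setup instead of failing.
    return generateParquetFiles && !Strings.isNullOrEmpty(parquetFile);
  }

  public static void main(String[] args) {
    System.out.println(shouldCreateParquetSink(true, ""));          // false
    System.out.println(shouldCreateParquetSink(true, "/tmp/dwh"));  // true
    System.out.println(shouldCreateParquetSink(false, "/tmp/dwh")); // false
  }
}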
4 changes: 2 additions & 2 deletions
@@ -1,5 +1,5 @@
/*
- * Copyright 2020-2024 Google LLC
+ * Copyright 2020-2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -270,7 +270,7 @@ public interface FhirEtlOptions extends BasePipelineOptions {
@Default.Boolean(true)
Boolean isGenerateParquetFiles();

- void setGenrateParquetFiles(Boolean value);
+ void setGenerateParquetFiles(Boolean value);

void setSourceNdjsonFilePatternList(String value);
}
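Note: Beam builds PipelineOptions implementations from JavaBean-style getter/setter pairs, so the setter's name has to mirror the getter exactly; the old setGenrateParquetFiles (missing an "e") left isGenerateParquetFiles without a matching setter, which is the kind of mismatch PipelineOptionsFactory rejects during validation and a likely cause of the failing mvn tests. A hedged, stand-alone sketch of the convention; DemoOptions is illustrative, not a class in this repo:

import org.apache.beam.sdk.options.Default;
import org.apache.beam.sdk.options.PipelineOptions;
import org.apache.beam.sdk.options.PipelineOptionsFactory;

// Sketch of the Beam PipelineOptions naming convention used by FhirEtlOptions.
public interface DemoOptions extends PipelineOptions {

  @Default.Boolean(true)
  Boolean isGenerateParquetFiles();

  // The setter name must mirror the getter ("Generate", not "Genrate");
  // otherwise PipelineOptionsFactory rejects the interface during validation.
  void setGenerateParquetFiles(Boolean value);

  static void main(String[] args) {
    DemoOptions opts = PipelineOptionsFactory.as(DemoOptions.class);
    opts.setGenerateParquetFiles(false);
    System.out.println(opts.isGenerateParquetFiles()); // false
  }
}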
5 changes: 3 additions & 2 deletions
@@ -1,5 +1,5 @@
/*
- * Copyright 2020-2024 Google LLC
+ * Copyright 2020-2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
@@ -140,6 +140,7 @@ void validateProperties() {
Preconditions.checkState(fhirVersion != null, "FhirVersion cannot be empty");
Preconditions.checkState(!createHiveResourceTables || !thriftserverHiveConfig.isEmpty());
Preconditions.checkState(!createHiveResourceTables || generateParquetFiles);
+ Preconditions.checkState(!createParquetViews || generateParquetFiles);
}

private PipelineConfig.PipelineConfigBuilder addFlinkOptions(FhirEtlOptions options) {
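Note: the added check encodes the implication "createParquetViews requires generateParquetFiles" in the usual !A || B form, mirroring the existing Hive-tables check on the line above it. A minimal sketch of the pattern with Guava's Preconditions; the class below is illustrative only:

import com.google.common.base.Preconditions;

// Sketch of the "A implies B" validation pattern; field names mirror the
// config flags, but this class is not part of the pipeline code.
class ConfigCheck {
  static void validate(boolean createParquetViews, boolean generateParquetFiles) {
    // "!A || B": views may only be requested when Parquet files are generated at all.
    Preconditions.checkState(
        !createParquetViews || generateParquetFiles,
        "createParquetViews requires generateParquetFiles to be true");
  }

  public static void main(String[] args) {
    validate(false, false); // fine
    validate(true, true);   // fine
    validate(true, false);  // throws IllegalStateException
  }
}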
@@ -217,7 +218,7 @@ PipelineConfig createBatchOptions() {
String timestampSuffix = DwhFiles.safeTimestampSuffix();
options.setOutputParquetPath(dwhRootPrefix + DwhFiles.TIMESTAMP_PREFIX + timestampSuffix);

- options.setGenrateParquetFiles(generateParquetFiles);
+ options.setGenerateParquetFiles(generateParquetFiles);

PipelineConfig.PipelineConfigBuilder pipelineConfigBuilder = addFlinkOptions(options);

