You can use the following code snippet to create a batch load task.
package com.example.tryit;
import java.util.Arrays;
import software.amazon.awssdk.services.timestreamwrite.model.CreateBatchLoadTaskRequest;
import software.amazon.awssdk.services.timestreamwrite.model.CreateBatchLoadTaskResponse;
import software.amazon.awssdk.services.timestreamwrite.model.DataModel;
import software.amazon.awssdk.services.timestreamwrite.model.DataModelConfiguration;
import software.amazon.awssdk.services.timestreamwrite.model.DataSourceConfiguration;
import software.amazon.awssdk.services.timestreamwrite.model.DataSourceS3Configuration;
import software.amazon.awssdk.services.timestreamwrite.model.DimensionMapping;
import software.amazon.awssdk.services.timestreamwrite.model.MultiMeasureAttributeMapping;
import software.amazon.awssdk.services.timestreamwrite.model.MultiMeasureMappings;
import software.amazon.awssdk.services.timestreamwrite.model.ReportConfiguration;
import software.amazon.awssdk.services.timestreamwrite.model.ReportS3Configuration;
import software.amazon.awssdk.services.timestreamwrite.model.ScalarMeasureValueType;
import software.amazon.awssdk.services.timestreamwrite.model.TimeUnit;
import software.amazon.awssdk.services.timestreamwrite.TimestreamWriteClient;
public class BatchLoadExample {
public static final String DATABASE_NAME = <database name>
;
public static final String TABLE_NAME = <table name>
;
public static final String INPUT_BUCKET = <S3 location>
;
public static final String INPUT_OBJECT_KEY_PREFIX = <CSV filename>
;
public static final String REPORT_BUCKET = <S3 location>
;
public static final long HT_TTL_HOURS = 24L;
public static final long CT_TTL_DAYS = 7L;
TimestreamWriteClient amazonTimestreamWrite;
public BatchLoadExample(TimestreamWriteClient client) {
this.amazonTimestreamWrite = client;
}
public String createBatchLoadTask() {
System.out.println("Creating batch load task");
CreateBatchLoadTaskRequest request = CreateBatchLoadTaskRequest.builder()
.dataModelConfiguration(DataModelConfiguration.builder()
.dataModel(DataModel.builder()
.timeColumn("timestamp")
.timeUnit(TimeUnit.SECONDS)
.dimensionMappings(Arrays.asList(
DimensionMapping.builder()
.sourceColumn("vehicle")
.build(),
DimensionMapping.builder()
.sourceColumn("registration")
.destinationColumn("license")
.build()))
.multiMeasureMappings(MultiMeasureMappings.builder()
.targetMultiMeasureName("mva_measure_name")
.multiMeasureAttributeMappings(Arrays.asList(
MultiMeasureAttributeMapping.builder()
.sourceColumn("wgt")
.targetMultiMeasureAttributeName("weight")
.measureValueType(ScalarMeasureValueType.DOUBLE)
.build(),
MultiMeasureAttributeMapping.builder()
.sourceColumn("spd")
.targetMultiMeasureAttributeName("speed")
.measureValueType(ScalarMeasureValueType.DOUBLE)
.build(),
MultiMeasureAttributeMapping.builder()
.sourceColumn("fuel")
.measureValueType(ScalarMeasureValueType.DOUBLE)
.build(),
MultiMeasureAttributeMapping.builder()
.sourceColumn("miles")
.measureValueType(ScalarMeasureValueType.DOUBLE)
.build()))
.build())
.build())
.build())
.dataSourceConfiguration(DataSourceConfiguration.builder()
.dataSourceS3Configuration(
DataSourceS3Configuration.builder()
.bucketName(INPUT_BUCKET)
.objectKeyPrefix(INPUT_OBJECT_KEY_PREFIX)
.build())
.dataFormat("CSV")
.build())
.reportConfiguration(ReportConfiguration.builder()
.reportS3Configuration(ReportS3Configuration.builder()
.bucketName(REPORT_BUCKET)
.build())
.build())
.targetDatabaseName(DATABASE_NAME)
.targetTableName(TABLE_NAME)
.build();
try {
final CreateBatchLoadTaskResponse createBatchLoadTaskResponse = amazonTimestreamWrite.createBatchLoadTask(request);
String taskId = createBatchLoadTaskResponse.taskId();
System.out.println("Successfully created batch load task: " + taskId);
return taskId;
} catch (Exception e) {
System.out.println("Failed to create batch load task: " + e);
throw e;
}
}
}