

Creating a scheduled query

You can use the following code snippets to create a scheduled query with multi-measure mapping.
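All of the snippets assume that a Timestream query client has already been constructed and that the resources they receive as parameters (an SNS topic for run notifications, an IAM role that Timestream assumes to run the query, and an S3 bucket for error reports) already exist. The following is a minimal sketch of that setup in Python with boto3; the ARNs and bucket name are placeholders to replace with resources from your own account.

import boto3

# Query client used by the snippets (referred to as query_client or queryClient).
query_client = boto3.client('timestream-query', region_name='us-east-1')

# Placeholder values; replace with resources that exist in your account.
topic_arn = 'arn:aws:sns:us-east-1:123456789012:scheduled-query-topic'   # SNS topic for notifications
role_arn = 'arn:aws:iam::123456789012:role/ScheduledQueryExecutionRole'  # execution role for the scheduled query
s3_error_report_bucket = 'scheduled-query-error-report-bucket-example'   # S3 bucket for error reports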

Java
public static String DATABASE_NAME = "devops_sample_application"; public static String TABLE_NAME = "host_metrics_sample_application"; public static String HOSTNAME = "host-24Gju"; public static String SQ_NAME = "daily-sample"; public static String SCHEDULE_EXPRESSION = "cron(0/2 * * * ? *)"; // Find the average, p90, p95, and p99 CPU utilization for a specific EC2 host over the past 2 hours. public static String QUERY = "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " + "ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " + "FROM " + DATABASE_NAME + "." + TABLE_NAME + " " + "WHERE measure_name = 'metrics' " + "AND hostname = '" + HOSTNAME + "' " + "AND time > ago(2h) " + "GROUP BY region, hostname, az, BIN(time, 15s) " + "ORDER BY binned_timestamp ASC " + "LIMIT 5"; public String createScheduledQuery(String topic_arn, String role_arn, String database_name, String table_name) { System.out.println("Creating Scheduled Query"); List<Pair<String, MeasureValueType>> sourceColToMeasureValueTypes = Arrays.asList( Pair.of("avg_cpu_utilization", DOUBLE), Pair.of("p90_cpu_utilization", DOUBLE), Pair.of("p95_cpu_utilization", DOUBLE), Pair.of("p99_cpu_utilization", DOUBLE)); CreateScheduledQueryRequest createScheduledQueryRequest = new CreateScheduledQueryRequest() .withName(SQ_NAME) .withQueryString(QUERY) .withScheduleConfiguration(new ScheduleConfiguration() .withScheduleExpression(SCHEDULE_EXPRESSION)) .withNotificationConfiguration(new NotificationConfiguration() .withSnsConfiguration(new SnsConfiguration() .withTopicArn(topic_arn))) .withTargetConfiguration(new TargetConfiguration().withTimestreamConfiguration(new TimestreamConfiguration() .withDatabaseName(database_name) .withTableName(table_name) .withTimeColumn("binned_timestamp") .withDimensionMappings(Arrays.asList( new DimensionMapping() .withName("region") .withDimensionValueType("VARCHAR"), new DimensionMapping() .withName("az") .withDimensionValueType("VARCHAR"), new DimensionMapping() .withName("hostname") .withDimensionValueType("VARCHAR") )) .withMultiMeasureMappings(new MultiMeasureMappings() .withTargetMultiMeasureName("multi-metrics") .withMultiMeasureAttributeMappings( sourceColToMeasureValueTypes.stream() .map(pair -> new MultiMeasureAttributeMapping() .withMeasureValueType(pair.getValue().name()) .withSourceColumn(pair.getKey())) .collect(Collectors.toList()))))) .withErrorReportConfiguration(new ErrorReportConfiguration() .withS3Configuration(new S3Configuration() .withBucketName(timestreamDependencyHelper.getS3ErrorReportBucketName()))) .withScheduledQueryExecutionRoleArn(role_arn); try { final CreateScheduledQueryResult createScheduledQueryResult = queryClient.createScheduledQuery(createScheduledQueryRequest); final String scheduledQueryArn = createScheduledQueryResult.getArn(); System.out.println("Successfully created scheduled query : " + scheduledQueryArn); return scheduledQueryArn; } catch (Exception e) { System.out.println("Scheduled Query creation failed: " + e); throw e; } }
Java v2
public static String DATABASE_NAME = "testJavaV2DB"; public static String TABLE_NAME = "testJavaV2Table"; public static String HOSTNAME = "host-24Gju"; public static String SQ_NAME = "daily-sample"; public static String SCHEDULE_EXPRESSION = "cron(0/2 * * * ? *)"; // Find the average, p90, p95, and p99 CPU utilization for a specific EC2 host over the past 2 hours. public static String VALID_QUERY = "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " + "ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " + "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " + "FROM " + DATABASE_NAME + "." + TABLE_NAME + " " + "WHERE measure_name = 'metrics' " + "AND hostname = '" + HOSTNAME + "' " + "AND time > ago(2h) " + "GROUP BY region, hostname, az, BIN(time, 15s) " + "ORDER BY binned_timestamp ASC " + "LIMIT 5"; private String createScheduledQueryHelper(String topicArn, String roleArn, String s3ErrorReportBucketName, String query, TargetConfiguration targetConfiguration) { System.out.println("Creating Scheduled Query"); CreateScheduledQueryRequest createScheduledQueryRequest = CreateScheduledQueryRequest.builder() .name(SQ_NAME) .queryString(query) .scheduleConfiguration(ScheduleConfiguration.builder() .scheduleExpression(SCHEDULE_EXPRESSION) .build()) .notificationConfiguration(NotificationConfiguration.builder() .snsConfiguration(SnsConfiguration.builder() .topicArn(topicArn) .build()) .build()) .targetConfiguration(targetConfiguration) .errorReportConfiguration(ErrorReportConfiguration.builder() .s3Configuration(S3Configuration.builder() .bucketName(s3ErrorReportBucketName) .objectKeyPrefix(SCHEDULED_QUERY_EXAMPLE) .build()) .build()) .scheduledQueryExecutionRoleArn(roleArn) .build(); try { final CreateScheduledQueryResponse response = queryClient.createScheduledQuery(createScheduledQueryRequest); final String scheduledQueryArn = response.arn(); System.out.println("Successfully created scheduled query : " + scheduledQueryArn); return scheduledQueryArn; } catch (Exception e) { System.out.println("Scheduled Query creation failed: " + e); throw e; } } public String createScheduledQuery(String topicArn, String roleArn, String databaseName, String tableName, String s3ErrorReportBucketName) { List<Pair<String, MeasureValueType>> sourceColToMeasureValueTypes = Arrays.asList( Pair.of("avg_cpu_utilization", DOUBLE), Pair.of("p90_cpu_utilization", DOUBLE), Pair.of("p95_cpu_utilization", DOUBLE), Pair.of("p99_cpu_utilization", DOUBLE)); TargetConfiguration targetConfiguration = TargetConfiguration.builder() .timestreamConfiguration(TimestreamConfiguration.builder() .databaseName(databaseName) .tableName(tableName) .timeColumn("binned_timestamp") .dimensionMappings(Arrays.asList( DimensionMapping.builder() .name("region") .dimensionValueType("VARCHAR") .build(), DimensionMapping.builder() .name("az") .dimensionValueType("VARCHAR") .build(), DimensionMapping.builder() .name("hostname") .dimensionValueType("VARCHAR") .build() )) .multiMeasureMappings(MultiMeasureMappings.builder() .targetMultiMeasureName("multi-metrics") .multiMeasureAttributeMappings( sourceColToMeasureValueTypes.stream() .map(pair -> MultiMeasureAttributeMapping.builder() .measureValueType(pair.getValue().name()) .sourceColumn(pair.getKey()) .build()) .collect(Collectors.toList())) .build()) .build()) .build(); return 
createScheduledQueryHelper(topicArn, roleArn, s3ErrorReportBucketName, VALID_QUERY, targetConfiguration); }}
Go
SQ_ERROR_CONFIGURATION_S3_BUCKET_NAME_PREFIX = "sq-error-configuration-sample-s3-bucket-"
HOSTNAME = "host-24Gju"
SQ_NAME = "daily-sample"
SCHEDULE_EXPRESSION = "cron(0/1 * * * ? *)"
QUERY = "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " +
    "ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " +
    "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " +
    "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " +
    "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " +
    "FROM %s.%s " +
    "WHERE measure_name = 'metrics' " +
    "AND hostname = '" + HOSTNAME + "' " +
    "AND time > ago(2h) " +
    "GROUP BY region, hostname, az, BIN(time, 15s) " +
    "ORDER BY binned_timestamp ASC " +
    "LIMIT 5"

s3BucketName = utils.SQ_ERROR_CONFIGURATION_S3_BUCKET_NAME_PREFIX + generateRandomStringWithSize(5)

func generateRandomStringWithSize(size int) string {
    rand.Seed(time.Now().UnixNano())
    alphaNumericList := []rune("abcdefghijklmnopqrstuvwxyz0123456789")
    randomPrefix := make([]rune, size)
    for i := range randomPrefix {
        randomPrefix[i] = alphaNumericList[rand.Intn(len(alphaNumericList))]
    }
    return string(randomPrefix)
}

func (timestreamBuilder TimestreamBuilder) createScheduledQuery(topicArn string, roleArn string,
    s3ErrorReportBucketName string, query string, targetConfiguration timestreamquery.TargetConfiguration) (string, error) {

    createScheduledQueryInput := &timestreamquery.CreateScheduledQueryInput{
        Name:        aws.String(SQ_NAME),
        QueryString: aws.String(query),
        ScheduleConfiguration: &timestreamquery.ScheduleConfiguration{
            ScheduleExpression: aws.String(SCHEDULE_EXPRESSION),
        },
        NotificationConfiguration: &timestreamquery.NotificationConfiguration{
            SnsConfiguration: &timestreamquery.SnsConfiguration{
                TopicArn: aws.String(topicArn),
            },
        },
        TargetConfiguration: &targetConfiguration,
        ErrorReportConfiguration: &timestreamquery.ErrorReportConfiguration{
            S3Configuration: &timestreamquery.S3Configuration{
                BucketName: aws.String(s3ErrorReportBucketName),
            },
        },
        ScheduledQueryExecutionRoleArn: aws.String(roleArn),
    }

    createScheduledQueryOutput, err := timestreamBuilder.QuerySvc.CreateScheduledQuery(createScheduledQueryInput)
    if err != nil {
        fmt.Printf("Error: %s", err.Error())
    } else {
        fmt.Println("createScheduledQueryResult is successful")
        return *createScheduledQueryOutput.Arn, nil
    }
    return "", err
}

func (timestreamBuilder TimestreamBuilder) CreateValidScheduledQuery(topicArn string, roleArn string,
    s3ErrorReportBucketName string, sqDatabaseName string, sqTableName string,
    databaseName string, tableName string) (string, error) {

    targetConfiguration := timestreamquery.TargetConfiguration{
        TimestreamConfiguration: &timestreamquery.TimestreamConfiguration{
            DatabaseName: aws.String(sqDatabaseName),
            TableName:    aws.String(sqTableName),
            TimeColumn:   aws.String("binned_timestamp"),
            DimensionMappings: []*timestreamquery.DimensionMapping{
                {
                    Name:               aws.String("region"),
                    DimensionValueType: aws.String("VARCHAR"),
                },
                {
                    Name:               aws.String("az"),
                    DimensionValueType: aws.String("VARCHAR"),
                },
                {
                    Name:               aws.String("hostname"),
                    DimensionValueType: aws.String("VARCHAR"),
                },
            },
            MultiMeasureMappings: &timestreamquery.MultiMeasureMappings{
                TargetMultiMeasureName: aws.String("multi-metrics"),
                MultiMeasureAttributeMappings: []*timestreamquery.MultiMeasureAttributeMapping{
                    {
                        SourceColumn:     aws.String("avg_cpu_utilization"),
                        MeasureValueType: aws.String(timestreamquery.MeasureValueTypeDouble),
                    },
                    {
                        SourceColumn:     aws.String("p90_cpu_utilization"),
                        MeasureValueType: aws.String(timestreamquery.MeasureValueTypeDouble),
                    },
                    {
                        SourceColumn:     aws.String("p95_cpu_utilization"),
                        MeasureValueType: aws.String(timestreamquery.MeasureValueTypeDouble),
                    },
                    {
                        SourceColumn:     aws.String("p99_cpu_utilization"),
                        MeasureValueType: aws.String(timestreamquery.MeasureValueTypeDouble),
                    },
                },
            },
        },
    }

    return timestreamBuilder.createScheduledQuery(topicArn, roleArn, s3ErrorReportBucketName,
        fmt.Sprintf(QUERY, databaseName, tableName), targetConfiguration)
}
Python
HOSTNAME = "host-24Gju" SQ_NAME = "daily-sample" ERROR_BUCKET_NAME = "scheduledquerysamplerrorbucket" + ''.join([choice(ascii_lowercase) for _ in range(5)]) QUERY = \ "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " \ " ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " \ " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " \ " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " \ " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " \ "FROM " + database_name + "." + table_name + " " \ "WHERE measure_name = 'metrics' " \ "AND hostname = '" + self.HOSTNAME + "' " \ "AND time > ago(2h) " \ "GROUP BY region, hostname, az, BIN(time, 15s) " \ "ORDER BY binned_timestamp ASC " \ "LIMIT 5" def create_scheduled_query_helper(self, topic_arn, role_arn, query, target_configuration): print("\nCreating Scheduled Query") schedule_configuration = { 'ScheduleExpression': 'cron(0/2 * * * ? *)' } notification_configuration = { 'SnsConfiguration': { 'TopicArn': topic_arn } } error_report_configuration = { 'S3Configuration': { 'BucketName': ERROR_BUCKET_NAME } } try: create_scheduled_query_response = \ query_client.create_scheduled_query(Name=self.SQ_NAME, QueryString=query, ScheduleConfiguration=schedule_configuration, NotificationConfiguration=notification_configuration, TargetConfiguration=target_configuration, ScheduledQueryExecutionRoleArn=role_arn, ErrorReportConfiguration=error_report_configuration ) print("Successfully created scheduled query : ", create_scheduled_query_response['Arn']) return create_scheduled_query_response['Arn'] except Exception as err: print("Scheduled Query creation failed:", err) raise err def create_valid_scheduled_query(self, topic_arn, role_arn): target_configuration = { 'TimestreamConfiguration': { 'DatabaseName': self.sq_database_name, 'TableName': self.sq_table_name, 'TimeColumn': 'binned_timestamp', 'DimensionMappings': [ {'Name': 'region', 'DimensionValueType': 'VARCHAR'}, {'Name': 'az', 'DimensionValueType': 'VARCHAR'}, {'Name': 'hostname', 'DimensionValueType': 'VARCHAR'} ], 'MultiMeasureMappings': { 'TargetMultiMeasureName': 'target_name', 'MultiMeasureAttributeMappings': [ {'SourceColumn': 'avg_cpu_utilization', 'MeasureValueType': 'DOUBLE', 'TargetMultiMeasureAttributeName': 'avg_cpu_utilization'}, {'SourceColumn': 'p90_cpu_utilization', 'MeasureValueType': 'DOUBLE', 'TargetMultiMeasureAttributeName': 'p90_cpu_utilization'}, {'SourceColumn': 'p95_cpu_utilization', 'MeasureValueType': 'DOUBLE', 'TargetMultiMeasureAttributeName': 'p95_cpu_utilization'}, {'SourceColumn': 'p99_cpu_utilization', 'MeasureValueType': 'DOUBLE', 'TargetMultiMeasureAttributeName': 'p99_cpu_utilization'}, ] } } } return self.create_scheduled_query_helper(topic_arn, role_arn, QUERY, target_configuration)
Node.js

The following code snippet uses the AWS SDK for JavaScript v2 style. It is based on the Node.js sample Amazon Timestream for LiveAnalytics application available on GitHub.

const DATABASE_NAME = 'devops_sample_application';
const TABLE_NAME = 'host_metrics_sample_application';
const SQ_DATABASE_NAME = 'sq_result_database';
const SQ_TABLE_NAME = 'sq_result_table';
const HOSTNAME = "host-24Gju";
const SQ_NAME = "daily-sample";
const SCHEDULE_EXPRESSION = "cron(0/1 * * * ? *)";

// Find the average, p90, p95, and p99 CPU utilization for a specific EC2 host over the past 2 hours.
const VALID_QUERY = "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " +
    " ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " +
    " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " +
    " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " +
    " ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " +
    "FROM " + DATABASE_NAME + "." + TABLE_NAME + " " +
    "WHERE measure_name = 'metrics' " +
    " AND hostname = '" + HOSTNAME + "' " +
    " AND time > ago(2h) " +
    "GROUP BY region, hostname, az, BIN(time, 15s) " +
    "ORDER BY binned_timestamp ASC " +
    "LIMIT 5";

async function createScheduledQuery(topicArn, roleArn, s3ErrorReportBucketName) {
    console.log("Creating Valid Scheduled Query");

    const DimensionMappingList = [{
        'Name': 'region',
        'DimensionValueType': 'VARCHAR'
    },
    {
        'Name': 'az',
        'DimensionValueType': 'VARCHAR'
    },
    {
        'Name': 'hostname',
        'DimensionValueType': 'VARCHAR'
    }];

    const MultiMeasureMappings = {
        TargetMultiMeasureName: "multi-metrics",
        MultiMeasureAttributeMappings: [{
            'SourceColumn': 'avg_cpu_utilization',
            'MeasureValueType': 'DOUBLE'
        },
        {
            'SourceColumn': 'p90_cpu_utilization',
            'MeasureValueType': 'DOUBLE'
        },
        {
            'SourceColumn': 'p95_cpu_utilization',
            'MeasureValueType': 'DOUBLE'
        },
        {
            'SourceColumn': 'p99_cpu_utilization',
            'MeasureValueType': 'DOUBLE'
        }]
    }

    const timestreamConfiguration = {
        DatabaseName: SQ_DATABASE_NAME,
        TableName: SQ_TABLE_NAME,
        TimeColumn: "binned_timestamp",
        DimensionMappings: DimensionMappingList,
        MultiMeasureMappings: MultiMeasureMappings
    }

    const createScheduledQueryRequest = {
        Name: SQ_NAME,
        QueryString: VALID_QUERY,
        ScheduleConfiguration: {
            ScheduleExpression: SCHEDULE_EXPRESSION
        },
        NotificationConfiguration: {
            SnsConfiguration: {
                TopicArn: topicArn
            }
        },
        TargetConfiguration: {
            TimestreamConfiguration: timestreamConfiguration
        },
        ScheduledQueryExecutionRoleArn: roleArn,
        ErrorReportConfiguration: {
            S3Configuration: {
                BucketName: s3ErrorReportBucketName
            }
        }
    };

    try {
        const data = await queryClient.createScheduledQuery(createScheduledQueryRequest).promise();
        console.log("Successfully created scheduled query: " + data.Arn);
        return data.Arn;
    } catch (err) {
        console.log("Scheduled Query creation failed: ", err);
        throw err;
    }
}
.NET
public const string Hostname = "host-24Gju";
public const string SqName = "timestream-sample";
public const string SqDatabaseName = "sq_result_database";
public const string SqTableName = "sq_result_table";
public const string ErrorConfigurationS3BucketNamePrefix = "error-configuration-sample-s3-bucket-";
public const string ScheduleExpression = "cron(0/2 * * * ? *)";

// Find the average, p90, p95, and p99 CPU utilization for a specific EC2 host over the past 2 hours.
public const string ValidQuery = "SELECT region, az, hostname, BIN(time, 15s) AS binned_timestamp, " +
                                 "ROUND(AVG(cpu_utilization), 2) AS avg_cpu_utilization, " +
                                 "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.9), 2) AS p90_cpu_utilization, " +
                                 "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.95), 2) AS p95_cpu_utilization, " +
                                 "ROUND(APPROX_PERCENTILE(cpu_utilization, 0.99), 2) AS p99_cpu_utilization " +
                                 "FROM " + Constants.DATABASE_NAME + "." + Constants.TABLE_NAME + " " +
                                 "WHERE measure_name = 'metrics' " +
                                 "AND hostname = '" + Hostname + "' " +
                                 "AND time > ago(2h) " +
                                 "GROUP BY region, hostname, az, BIN(time, 15s) " +
                                 "ORDER BY binned_timestamp ASC " +
                                 "LIMIT 5";

private async Task<String> CreateValidScheduledQuery(string topicArn, string roleArn, string databaseName,
    string tableName, string s3ErrorReportBucketName)
{
    List<MultiMeasureAttributeMapping> sourceColToMeasureValueTypes = new List<MultiMeasureAttributeMapping>()
    {
        new() { SourceColumn = "avg_cpu_utilization", MeasureValueType = MeasureValueType.DOUBLE.Value },
        new() { SourceColumn = "p90_cpu_utilization", MeasureValueType = MeasureValueType.DOUBLE.Value },
        new() { SourceColumn = "p95_cpu_utilization", MeasureValueType = MeasureValueType.DOUBLE.Value },
        new() { SourceColumn = "p99_cpu_utilization", MeasureValueType = MeasureValueType.DOUBLE.Value }
    };

    TargetConfiguration targetConfiguration = new TargetConfiguration()
    {
        TimestreamConfiguration = new TimestreamConfiguration()
        {
            DatabaseName = databaseName,
            TableName = tableName,
            TimeColumn = "binned_timestamp",
            DimensionMappings = new List<DimensionMapping>()
            {
                new() { Name = "region", DimensionValueType = "VARCHAR" },
                new() { Name = "az", DimensionValueType = "VARCHAR" },
                new() { Name = "hostname", DimensionValueType = "VARCHAR" }
            },
            MultiMeasureMappings = new MultiMeasureMappings()
            {
                TargetMultiMeasureName = "multi-metrics",
                MultiMeasureAttributeMappings = sourceColToMeasureValueTypes
            }
        }
    };

    return await CreateScheduledQuery(topicArn, roleArn, s3ErrorReportBucketName,
        ScheduledQueryConstants.ValidQuery, targetConfiguration);
}

private async Task<String> CreateScheduledQuery(string topicArn, string roleArn, string s3ErrorReportBucketName,
    string query, TargetConfiguration targetConfiguration)
{
    try
    {
        Console.WriteLine("Creating Scheduled Query");
        CreateScheduledQueryResponse response = await _amazonTimestreamQuery.CreateScheduledQueryAsync(
            new CreateScheduledQueryRequest()
            {
                Name = ScheduledQueryConstants.SqName,
                QueryString = query,
                ScheduleConfiguration = new ScheduleConfiguration()
                {
                    ScheduleExpression = ScheduledQueryConstants.ScheduleExpression
                },
                NotificationConfiguration = new NotificationConfiguration()
                {
                    SnsConfiguration = new SnsConfiguration() { TopicArn = topicArn }
                },
                TargetConfiguration = targetConfiguration,
                ErrorReportConfiguration = new ErrorReportConfiguration()
                {
                    S3Configuration = new S3Configuration() { BucketName = s3ErrorReportBucketName }
                },
                ScheduledQueryExecutionRoleArn = roleArn
            });
        Console.WriteLine($"Successfully created scheduled query : {response.Arn}");
        return response.Arn;
    }
    catch (Exception e)
    {
        Console.WriteLine($"Scheduled Query creation failed: {e}");
        throw;
    }
}
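After any of these snippets returns an ARN, you can confirm that the scheduled query was created and check its state with the DescribeScheduledQuery API. The following is a minimal sketch in Python with boto3, assuming the query client and the ARN from the earlier examples:

# Confirm creation and inspect the scheduled query's state (ENABLED or DISABLED).
response = query_client.describe_scheduled_query(ScheduledQueryArn=scheduled_query_arn)
print(response['ScheduledQuery']['Name'], response['ScheduledQuery']['State'])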