From 34648470b48fc14e3ceef3a9f8e1dc2129d9b60e Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Thu, 20 May 2021 08:06:50 +1000
Subject: [PATCH 1/2] Deal with missing v2.12.0 release branch in scala/scala

---
 .../src/main/java/scala/bench/GitWalker.java | 23 ++++++++++++-------
 1 file changed, 15 insertions(+), 8 deletions(-)

diff --git a/infrastructure/src/main/java/scala/bench/GitWalker.java b/infrastructure/src/main/java/scala/bench/GitWalker.java
index 27fc179..d0ed255 100644
--- a/infrastructure/src/main/java/scala/bench/GitWalker.java
+++ b/infrastructure/src/main/java/scala/bench/GitWalker.java
@@ -29,12 +29,16 @@ public static GitWalkerResult walk(Repository repo) {
                 .retentionPolicy("autogen")
                 .consistency(InfluxDB.ConsistencyLevel.ALL)
                 .build();
-        createPoints("2.13.x", "fc1aea6712", batchPoints, repo, branchesMap);
-        createPoints("2.12.x", "132a0587ab", batchPoints, repo, branchesMap);
-        createPoints("v2.12.0", "05016d9035", batchPoints, repo, branchesMap);
-        createPoints("2.11.x", "7ac15a1210", batchPoints, repo, branchesMap);
-        createPoints("2.10.x", "cc672b023e", batchPoints, repo, branchesMap);
-        createPoints("2.9.x", "33e1dac4e4", batchPoints, repo, branchesMap);
+        createPoints("2.13.x", null, "fc1aea6712", batchPoints, repo, branchesMap);
+        createPoints("2.12.x", null, "132a0587ab", batchPoints, repo, branchesMap);
+
+        // There used to be a release branch v2.12.0; it appears to have been deleted since.
+        // Use the commit of the v2.12.0 tag as the tip of this virtual branch.
+        createPoints("v2.12.0", "9a6ace1637053c094bfd395de540fe43c658b335", "05016d9035", batchPoints, repo, branchesMap);
+
+        createPoints("2.11.x", null, "7ac15a1210", batchPoints, repo, branchesMap);
+        createPoints("2.10.x", null, "cc672b023e", batchPoints, repo, branchesMap);
+        createPoints("2.9.x", null, "33e1dac4e4", batchPoints, repo, branchesMap);
 
         return new GitWalkerResult(batchPoints, branchesMap, repo);
     }
@@ -50,14 +54,17 @@ private static int countParentsWithSameCommitTime(RevCommit revCommit) {
 
     static long adjustCommitTime(RevCommit revCommit) {
         int numParentsWithSameCommitTime = countParentsWithSameCommitTime(revCommit);
-        return (long) revCommit.getCommitTime() * 1000L + numParentsWithSameCommitTime * 10;
+        return (long) revCommit.getCommitTime() * 1000L + numParentsWithSameCommitTime * 10L;
     }
 
     public void upload(BatchPoints batchPoints) {
 
     }
 
-    private static BatchPoints createPoints(String branch, String forkPoint, BatchPoints batchPoints, Repository repo, Map branchesMap) {
+    private static BatchPoints createPoints(String branch, String branchRef, String forkPoint, BatchPoints batchPoints, Repository repo, Map branchesMap) {
+        if (branchRef != null) {
+            branch = branchRef;
+        }
         try {
             ObjectId resolvedBranch = resolve(branch, repo);
             ObjectId resolvedForkPoint = resolve(forkPoint, repo);
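
Note on the first patch: the new branchRef parameter works because a raw commit SHA can be resolved just like a branch name, so the deleted v2.12.0 release branch can be replaced by the commit that the v2.12.0 tag points at. Below is a minimal standalone sketch of that resolution step using JGit directly; it is not part of the patch, the repository path and the remote ref name are illustrative placeholders, and only the long SHA is taken from the change above.

    // Illustrative only: JGit's Repository.resolve accepts either a ref name or a raw
    // SHA-1, which is what lets an explicit commit stand in for a deleted branch.
    import org.eclipse.jgit.lib.ObjectId;
    import org.eclipse.jgit.lib.Repository;
    import org.eclipse.jgit.storage.file.FileRepositoryBuilder;

    import java.io.File;
    import java.io.IOException;

    public class ResolveSketch {
        public static void main(String[] args) throws IOException {
            Repository repo = new FileRepositoryBuilder()
                    .setGitDir(new File("/path/to/scala/.git")) // placeholder path
                    .build();
            // A live branch resolves by ref name...
            ObjectId branchTip = repo.resolve("refs/remotes/origin/2.13.x");
            // ...while the vanished v2.12.0 branch is pinned to an explicit commit
            // (the commit referenced by the v2.12.0 tag, per the patch above).
            ObjectId virtualTip = repo.resolve("9a6ace1637053c094bfd395de540fe43c658b335");
            System.out.println(branchTip + " / " + virtualTip);
            repo.close();
        }
    }
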
From bd8aabc0e465ffb9205354f01daefeb83e782912 Mon Sep 17 00:00:00 2001
From: Jason Zaugg
Date: Thu, 20 May 2021 08:07:42 +1000
Subject: [PATCH 2/2] Don't turn an integral field into a floating point one during migration.

---
 .../src/main/java/scala/bench/DataMigrator.java | 9 +++++++--
 1 file changed, 7 insertions(+), 2 deletions(-)

diff --git a/infrastructure/src/main/java/scala/bench/DataMigrator.java b/infrastructure/src/main/java/scala/bench/DataMigrator.java
index 964b271..a83b455 100644
--- a/infrastructure/src/main/java/scala/bench/DataMigrator.java
+++ b/infrastructure/src/main/java/scala/bench/DataMigrator.java
@@ -6,7 +6,6 @@
 import org.influxdb.dto.Query;
 import org.influxdb.dto.QueryResult;
 
-import java.io.IOException;
 import java.time.Instant;
 import java.util.*;
 import java.util.concurrent.TimeUnit;
@@ -54,7 +53,13 @@ public static void main(String[] args) {
                 LinkedHashMap newFieldsMap = new LinkedHashMap<>();
                 assert (newFieldNames.size() == newValues.size());
                 for (int i = 0; i < newFieldNames.size(); i++) {
-                    newFieldsMap.put(newFieldNames.get(i), newValues.get(i));
+                    String fieldName = newFieldNames.get(i);
+                    boolean isLong = fieldName.equals("sampleCount");
+                    if (isLong) {
+                        newFieldsMap.put(fieldName, ((Number) newValues.get(i)).longValue());
+                    } else {
+                        newFieldsMap.put(fieldName, newValues.get(i));
+                    }
                 }
                 builder.fields(newFieldsMap);
                 Instant parse = Instant.parse(time);
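
Note on the second patch: query results come back from InfluxDB's HTTP API as JSON, so the Java client presumably hands every numeric value back as a Double; without the coercion above, re-writing the points during migration would silently turn the integral sampleCount field into a floating point one. Below is a small self-contained sketch of that coercion pattern, not the migrator itself; the "score" field and the sample values are invented for illustration.

    // Sketch of the coercion applied in DataMigrator: narrow known integral fields back
    // to long before re-writing them; everything else is passed through unchanged.
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class CoercionSketch {
        static Map<String, Object> coerceFields(List<String> names, List<Object> values) {
            LinkedHashMap<String, Object> fields = new LinkedHashMap<>();
            for (int i = 0; i < names.size(); i++) {
                String name = names.get(i);
                Object value = values.get(i);
                // Query results arrive as Double; restore the integral type for count-like fields.
                fields.put(name, name.equals("sampleCount") ? ((Number) value).longValue() : value);
            }
            return fields;
        }

        public static void main(String[] args) {
            Map<String, Object> fields = coerceFields(
                    List.of("sampleCount", "score"),   // "score" is an invented example field
                    List.of(42.0, 123.45));
            System.out.println(fields); // prints {sampleCount=42, score=123.45}
        }
    }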