Skip to content

Commit

Permalink
Move createResultWriters method
Browse files Browse the repository at this point in the history
Into the RegionalAnalysisController
  • Loading branch information
trevorgerhardt committed Nov 29, 2023
1 parent ca1ba48 commit 0a6f9b1
Show file tree
Hide file tree
Showing 4 changed files with 71 additions and 70 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -455,7 +455,7 @@ public void handleRegionalWorkResult(RegionalWorkResult workResult) {

// Store all result files permanently.
for (var resultFile : resultFiles.entrySet()) {
this.fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue());
fileStorage.moveIntoStorage(resultFile.getKey(), resultFile.getValue());
}
} catch (Throwable t) {
recordJobError(job, ExceptionUtils.stackTraceString(t));
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -11,8 +11,14 @@
import com.conveyal.analysis.models.OpportunityDataset;
import com.conveyal.analysis.models.RegionalAnalysis;
import com.conveyal.analysis.persistence.Persistence;
import com.conveyal.analysis.results.AccessCsvResultWriter;
import com.conveyal.analysis.results.CsvResultType;
import com.conveyal.analysis.results.GridResultWriter;
import com.conveyal.analysis.results.MultiOriginAssembler;
import com.conveyal.analysis.results.PathCsvResultWriter;
import com.conveyal.analysis.results.RegionalResultWriter;
import com.conveyal.analysis.results.TemporalDensityCsvResultWriter;
import com.conveyal.analysis.results.TimeCsvResultWriter;
import com.conveyal.analysis.util.JsonUtil;
import com.conveyal.file.FileStorage;
import com.conveyal.file.FileStorageFormat;
Expand Down Expand Up @@ -47,6 +53,7 @@
import static com.conveyal.analysis.util.JsonUtil.toJson;
import static com.conveyal.file.FileCategory.BUNDLES;
import static com.conveyal.file.FileCategory.RESULTS;
import static com.conveyal.r5.common.Util.notNullOrEmpty;
import static com.conveyal.r5.transit.TransportNetworkCache.getScenarioFilename;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
Expand Down Expand Up @@ -459,7 +466,7 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
// In fact, there are three separate classes all containing almost the same info:
// AnalysisRequest (from UI to backend), RegionalTask (template sent to worker), RegionalAnalysis (in Mongo).
// And for regional analyses, two instances of the worker task: the one with the scenario, and the templateTask.
RegionalAnalysis regionalAnalysis = new RegionalAnalysis();
final RegionalAnalysis regionalAnalysis = new RegionalAnalysis();
regionalAnalysis.request = task;
regionalAnalysis.height = task.height;
regionalAnalysis.north = task.north;
Expand Down Expand Up @@ -508,13 +515,16 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro

// Persist this newly created RegionalAnalysis to Mongo.
// This assigns it creation/update time stamps and an ID, which is needed to name any output CSV files.
regionalAnalysis = Persistence.regionalAnalyses.create(regionalAnalysis);
Persistence.regionalAnalyses.create(regionalAnalysis);

// Create the regional job
var regionalJob = new Job(task, WorkerTags.fromRegionalAnalysis(regionalAnalysis));

// Create the result writers
var writers = createResultWriters(regionalAnalysis, task);

// Create the multi-origin assembler with the writers.
var assembler = new MultiOriginAssembler(regionalJob, regionalAnalysis.createResultWriters(task));
var assembler = new MultiOriginAssembler(regionalJob, writers);

// Stored scenario is needed by workers. Must be done ahead of enqueueing the job.
storeRegionalAnalysisScenarioJson(task);
Expand All @@ -530,6 +540,57 @@ private RegionalAnalysis createRegionalAnalysis (Request req, Response res) thro
return regionalAnalysis;
}

/**
 * Create the result writers for a regional analysis and its template task.
 * Side effect: records each CSV writer's output file name in {@code analysis.resultStorage},
 * keyed by its {@link CsvResultType}, so the files can be located later.
 * Taui sites produce no backend-stored results, so an empty list is returned for them.
 *
 * @param analysis the newly created regional analysis (already persisted, so it has an ID)
 * @param task the template task describing which result types were requested
 * @return the writers to pass to the MultiOriginAssembler; empty only for Taui sites
 * @throws IllegalArgumentException if temporal density is requested for gridded origins, or if
 *         a non-Taui task would produce no output files at all
 */
private static List<RegionalResultWriter> createResultWriters(RegionalAnalysis analysis, RegionalTask task) {
    var resultWriters = new ArrayList<RegionalResultWriter>();
    if (!task.makeTauiSite) {
        if (task.recordAccessibility) {
            if (task.originPointSet != null) {
                // Freeform origins - create CSV regional analysis results.
                var accessWriter = new AccessCsvResultWriter(task);
                resultWriters.add(accessWriter);
                analysis.resultStorage.put(accessWriter.resultType(), accessWriter.getFileName());
            } else {
                // Gridded origins - create gridded regional analysis results
                // (one grid per destination pointset and percentile).
                resultWriters.addAll(GridResultWriter.createWritersFromTask(analysis.destinationPointSetIds, task));
            }
        }

        if (task.recordTimes) {
            var timesWriter = new TimeCsvResultWriter(task);
            resultWriters.add(timesWriter);
            analysis.resultStorage.put(timesWriter.resultType(), timesWriter.getFileName());
        }

        if (task.includePathResults) {
            var pathsWriter = new PathCsvResultWriter(task);
            resultWriters.add(pathsWriter);
            analysis.resultStorage.put(pathsWriter.resultType(), pathsWriter.getFileName());
        }

        if (task.includeTemporalDensity) {
            if (task.originPointSet == null) {
                // Gridded origins. The full temporal density information is probably too voluminous to be useful.
                // We might want to record a grid of dual accessibility values, but this will require some serious
                // refactoring of the GridResultWriter.
                // if (job.templateTask.dualAccessibilityThreshold > 0) { ... }
                // This is a bad request, not an internal failure, so signal it with the standard
                // IllegalArgumentException (still a RuntimeException, so existing catch blocks are unaffected).
                throw new IllegalArgumentException("Temporal density of opportunities cannot be recorded for gridded origin points.");
            } else {
                var tDensityWriter = new TemporalDensityCsvResultWriter(task);
                resultWriters.add(tDensityWriter);
                analysis.resultStorage.put(tDensityWriter.resultType(), tDensityWriter.getFileName());
            }
        }

        // A non-Taui regional analysis that writes nothing would silently produce no results.
        checkArgument(notNullOrEmpty(resultWriters), "A regional analysis should always create at least one grid or CSV file.");
    }
    return resultWriters;
}

/**
* Store the regional analysis scenario as JSON for retrieval by the workers.
*/
Expand Down
61 changes: 0 additions & 61 deletions src/main/java/com/conveyal/analysis/models/RegionalAnalysis.java
Original file line number Diff line number Diff line change
@@ -1,23 +1,12 @@
package com.conveyal.analysis.models;

import com.conveyal.analysis.results.AccessCsvResultWriter;
import com.conveyal.analysis.results.CsvResultType;
import com.conveyal.analysis.results.GridResultWriter;
import com.conveyal.analysis.results.PathCsvResultWriter;
import com.conveyal.analysis.results.RegionalResultWriter;
import com.conveyal.analysis.results.TemporalDensityCsvResultWriter;
import com.conveyal.analysis.results.TimeCsvResultWriter;
import com.conveyal.r5.analyst.cluster.RegionalTask;
import org.locationtech.jts.geom.Geometry;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

import static com.conveyal.r5.common.Util.notNullOrEmpty;
import static com.google.common.base.Preconditions.checkArgument;

/**
* Represents a single regional (multi-origin) accessibility analysis,
* which may have more than one percentile and cutoff.
Expand Down Expand Up @@ -112,56 +101,6 @@ public class RegionalAnalysis extends Model implements Cloneable {
*/
public Map<CsvResultType, String> resultStorage = new HashMap<>();

/**
 * Build the set of result writers for this regional analysis given its template task.
 * Side effect: records each CSV writer's output file name in {@code this.resultStorage},
 * keyed by its result type, so the generated files can be located later.
 *
 * @param task the template task describing which result types were requested
 * @return the writers to feed the assembler; empty only when building a Taui site
 */
public List<RegionalResultWriter> createResultWriters(RegionalTask task) {
    var writers = new ArrayList<RegionalResultWriter>();
    if (task.makeTauiSite) {
        // Taui sites store no results on the backend, so no writers are needed.
        return writers;
    }
    if (task.recordAccessibility) {
        if (task.originPointSet == null) {
            // Gridded origins - one access grid per destination pointset and percentile.
            writers.addAll(GridResultWriter.createWritersFromTask(this, task));
        } else {
            // Freeform origins - accessibility goes into a CSV file.
            var accessWriter = new AccessCsvResultWriter(task);
            writers.add(accessWriter);
            resultStorage.put(accessWriter.resultType(), accessWriter.getFileName());
        }
    }
    if (task.recordTimes) {
        var timeWriter = new TimeCsvResultWriter(task);
        writers.add(timeWriter);
        resultStorage.put(timeWriter.resultType(), timeWriter.getFileName());
    }
    if (task.includePathResults) {
        var pathWriter = new PathCsvResultWriter(task);
        writers.add(pathWriter);
        resultStorage.put(pathWriter.resultType(), pathWriter.getFileName());
    }
    if (task.includeTemporalDensity) {
        if (task.originPointSet == null) {
            // Gridded origins. The full temporal density information is probably too voluminous to be useful.
            // We might want to record a grid of dual accessibility values, but this will require some serious
            // refactoring of the GridResultWriter.
            // if (job.templateTask.dualAccessibilityThreshold > 0) { ... }
            throw new RuntimeException("Temporal density of opportunities cannot be recorded for gridded origin points.");
        }
        var densityWriter = new TemporalDensityCsvResultWriter(task);
        writers.add(densityWriter);
        resultStorage.put(densityWriter.resultType(), densityWriter.getFileName());
    }
    // A non-Taui regional analysis must always emit at least one output file.
    checkArgument(notNullOrEmpty(writers), "A regional analysis should always create at least one grid or CSV file.");
    return writers;
}

public RegionalAnalysis clone () {
try {
return (RegionalAnalysis) super.clone();
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
package com.conveyal.analysis.results;

import com.conveyal.analysis.models.RegionalAnalysis;
import com.conveyal.file.FileCategory;
import com.conveyal.file.FileStorageKey;
import com.conveyal.file.FileUtils;
Expand Down Expand Up @@ -76,14 +75,14 @@ public class GridResultWriter implements RegionalResultWriter {
* We create one GridResultWriter for each destination pointset and percentile.
* Each of those output files contains data for all specified travel time cutoffs at each origin.
*/
public static List<GridResultWriter> createWritersFromTask(RegionalAnalysis regionalAnalysis, RegionalTask task) {
public static List<GridResultWriter> createWritersFromTask(String[] destinationPointSetIds, RegionalTask task) {
int nPercentiles = task.percentiles.length;
int nDestinationPointSets = task.makeTauiSite ? 0 : task.destinationPointSetKeys.length;
int nDestinationPointSets = destinationPointSetIds.length;
// Create one grid writer per percentile and destination pointset.
var gridWriters = new ArrayList<GridResultWriter>();
for (int destinationIndex = 0; destinationIndex < nDestinationPointSets; destinationIndex++) {
for (int percentileIndex = 0; percentileIndex < nPercentiles; percentileIndex++) {
String destinationPointSetId = regionalAnalysis.destinationPointSetIds[destinationIndex];
String destinationPointSetId = destinationPointSetIds[destinationIndex];
gridWriters.add(new GridResultWriter(
task,
percentileIndex,
Expand Down Expand Up @@ -148,7 +147,9 @@ public static List<GridResultWriter> createWritersFromTask(RegionalAnalysis regi
}
}

/** Gzip the access grid and upload it to file storage (such as AWS S3). */
/**
* Gzip the access grid and return the files.
*/
@Override
public synchronized Map.Entry<FileStorageKey, File> finish () throws IOException {
randomAccessFile.close();
Expand Down

0 comments on commit 0a6f9b1

Please sign in to comment.