@@ -57,7 +57,7 @@ class BulkCsvSpec extends RestIntTest implements SecuritySpecHelper {
when:
controller.params.attachmentId = attachment.id
controller.params['async'] = false //disable promise for test
-controller.params['dataFilename'] = "contact.csv"
+controller.params['payloadFilename'] = "contact.csv"
controller.params['saveDataAsFile'] = true //write to file

controller.bulkCreate()
@@ -316,7 +316,7 @@ trait CrudApiController<D> extends RestApiController {
* 2. Call POST /api/upload?name=myZip.zip, take attachmentId from the result
* 3. Call POST /api/rally/<domain>/bulk with query params:
* - attachmentId=<attachment-id>
-* - dataFilename= -- pass in data.csv and detail.csv as default of parameter for file names
+* - payloadFilename= -- pass in data.csv and detail.csv as default of parameter for file names
* - headerPathDelimiter -- default is '.', pass in '_' for underscore (this is path delimiter for header names, not csv delimiter)
* @param syncJobArgs the syncJobArgs that is setup, important to have params on it
* @return the jobId
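For readers following the three-step flow documented above, here is a rough Groovy sketch of step 3 using the JDK HTTP client. The host, domain (`contact`), attachment id, and the absence of authentication are placeholders and assumptions, not values from this PR; only the endpoint shape and the renamed `payloadFilename` query param come from the comment itself.

```groovy
import java.net.http.HttpClient
import java.net.http.HttpRequest
import java.net.http.HttpResponse

// Step 3 of the documented flow: POST to the bulk endpoint with query params only.
// localhost, 'contact' and attachmentId=123 are illustrative placeholders.
def client = HttpClient.newHttpClient()
def uri = URI.create('http://localhost:8080/api/rally/contact/bulk' +
    '?attachmentId=123&payloadFilename=contact.csv&headerPathDelimiter=_')
def request = HttpRequest.newBuilder(uri)
    .POST(HttpRequest.BodyPublishers.noBody())
    .build()
HttpResponse<String> response = client.send(request, HttpResponse.BodyHandlers.ofString())
println response.body()   // expected to carry the jobId returned by the bulk action
```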
@@ -100,7 +100,7 @@ class SyncJobArgs {
Boolean saveDataAsFile //= false

/**
-* (When attachmentId is set) Format for the data. either CSV or JSON are currently supported.
+* (When dataId is set) Format for the data. either CSV or JSON are currently supported.
*/
DataMimeTypes dataFormat = DataMimeTypes.json
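Not part of the diff, but as a hedged illustration of the two fields above: when the job data is staged rather than inlined, the format has to be overridden if it is not JSON. Groovy's map constructor and the enum value name `csv` (by analogy with the `json` default) are assumptions here.

```groovy
// Illustrative only -- override the json default for a CSV payload.
SyncJobArgs args = new SyncJobArgs(
    saveDataAsFile: true,               // persist job data to a file rather than inline
    dataFormat    : DataMimeTypes.csv   // assumed enum value, by analogy with DataMimeTypes.json
)
```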

@@ -40,9 +40,9 @@ class BulkImportJobArgs extends SyncJobArgs {
Long attachmentId

/**
-* (when attachmentId is set) the name of the data file in the zip, defaults to data.csv
+* (when attachmentId is set) the name of the payload file in the zip, defaults to data.csv
*/
-String dataFilename
+String payloadFilename

/**
* (For payloadFormat=CSV with attachmentId) CSV header pathDelimiter.
@@ -51,7 +51,7 @@
String headerPathDelimiter

/**
-* (When attachmentId is set) Format for the data. either CSV or JSON are currently supported.
+* (When attachmentId/payloadId is set) Format for the data. either CSV or JSON are currently supported.
*/
DataMimeTypes payloadFormat

@@ -111,6 +111,8 @@ class BulkImportJobArgs extends SyncJobArgs {
BasicDataBinder.bind(bijParams, p)
//put a full copy as is into the queryParams
bijParams.queryParams = p
+//XXX remove this once we know its not being used
+if(params.dataFilename) bijParams.payloadFilename = params.dataFilename
return bijParams
}
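A quick sketch of what the temporary fallback above buys callers. The factory name `fromParams` and the shape of the params map are assumptions (only the method body appears in this diff); the point is that the legacy `dataFilename` key still lands in `payloadFilename`.

```groovy
// Hypothetical call -- 'fromParams' is an assumed factory name for the method patched above.
Map legacyParams = [attachmentId: '123', dataFilename: 'contact.csv']
BulkImportJobArgs bijArgs = BulkImportJobArgs.fromParams(legacyParams)

assert bijArgs.payloadFilename == 'contact.csv'            // copied over by the XXX-marked fallback
assert bijArgs.queryParams.dataFilename == 'contact.csv'   // the raw params are kept verbatim
```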

@@ -31,19 +31,19 @@ class DefaultCsvToMapTransformer implements CsvToMapTransformer {
*
* @param params map with
* - attachmentId
-* - dataFilename : name of csv file inside zip
+* - payloadFilename : name of csv file inside zip
* - headerPathDelimiter : Header delimeter
*
* @return List<Map>
*/
List<Map> process(BulkImportJobArgs params) {
Long attachmentId = params.attachmentId as Long
-String dataFilename = params.dataFilename ?: "data.csv"
+String payloadFilename = params.payloadFilename ?: "data.csv"
String headerPathDelimiter = params.headerPathDelimiter ?: "."

List<Map> rows
//try-with-resources so it automatically closes and cleans up after itself
-try (InputStream ins = getInputStream(attachmentId, dataFilename)) {
+try (InputStream ins = getInputStream(attachmentId, payloadFilename)) {
rows = processRows(ins, headerPathDelimiter) as List<Map>
}
return rows
@@ -52,17 +52,17 @@
/**
* returns the InputStream for the attachment depending on whether its a zip or not
*/
-InputStream getInputStream(Long attachmentId, String dataFileName){
+InputStream getInputStream(Long attachmentId, String payloadFilename){
Attachment attachment = Attachment.get(attachmentId)
Validate.notNull(attachment, "Attachment not found : ${attachmentId}")
Resource res = attachment.resource
Validate.notNull(res)
File file = res.file
InputStream ins
-//if its a zip then get the file specified in dataFilename
+//if its a zip then get the file specified in payloadFilename
if(attachment.extension == 'zip') {
-ins = ZipUtils.getZipEntryInputStream(file, dataFileName)
-Validate.notNull(ins, "$dataFileName not found in zip")
+ins = ZipUtils.getZipEntryInputStream(file, payloadFilename)
+Validate.notNull(ins, "$payloadFilename not found in zip")
} else {
ins = file.newInputStream()
}
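Taken together, a minimal usage sketch of the transformer. The attachment id, file name, and direct no-arg construction are assumptions for illustration; in the app this bean would normally be injected and the attachment uploaded beforehand.

```groovy
// Illustrative only: read contact.csv out of a previously uploaded zip attachment.
CsvToMapTransformer transformer = new DefaultCsvToMapTransformer()

List<Map> rows = transformer.process(new BulkImportJobArgs(
    attachmentId       : 123L,           // id of the uploaded zip (or a bare csv attachment)
    payloadFilename    : 'contact.csv',  // zip entry to read; ignored for a non-zip attachment
    headerPathDelimiter: '.'             // delimiter used in the csv header names, not the csv itself
))

rows.each { Map row -> println row }     // one map per csv data row
```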
@@ -53,7 +53,7 @@ class DefaultCsvToMapTransformerSpec extends Specification implements DataRepoTe

when:
List<Map> rows = csvToMapTransformer.process(
-new BulkImportJobArgs(attachmentId:attachment.id, dataFilename:"contact.csv")
+new BulkImportJobArgs(attachmentId:attachment.id, payloadFilename:"contact.csv")
)

then: