diff --git a/.travis.yml b/.travis.yml
index 57e4e7cd..f87237a0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -16,11 +16,6 @@ script:
- ./bin/stock-import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --file data/example.xml
- ./bin/stock-import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --file data/example.csv
- docker build -t commercetools/stock-import .
-env:
- global:
- - secure: HYwKMrJ7h5Sz7E1OJf3jv0NOsgvYQ60Kl6qlZUjahoHgYDe//cVLpNJrMPZhAeyTGbpxeeqKxDXorQ6oMn9wqYbQGAXcAWYb0aw0oQH1q9XNteLiW+xVb+PDPRouSCh/cJTjtKdfa/C5L+rb8n6P3TuAI8qooBV9WY8f21IU7aM=
- - secure: XKG5Icf8flDb4+A5t4pdAkLoQunlly/P6wVBnwhChGizgHaKC2oXrnLfPiQ1EryWdhYLjDSmU8jx7xxF84NPrHp5N8eJTpyW85tWAzpH6144VEyVpiti4OtMcbtq55ppsma6zb80IHxwcDKcg/mXg8r9F6tnUT7+7QmVah0Z7zQ=
- - secure: qRiJqZFmqQNfjZwk5AV8+DkIbVpyo38qqv8ks255J2zqDcTiBb28ME0UWWs9VXBOXrfPEKet07FAnUWCfguGqMG3+S3yFVxsaGlrX+ZQu8pSbXXseVgUOPPlVKAXHCm6fjf0jpElPhrJJc8QbQth+DUzrTfBTzgQMvEwhLG7auc=
notifications:
hipchat:
rooms:
diff --git a/Gruntfile.coffee b/Gruntfile.coffee
index 64ec356f..df795f41 100644
--- a/Gruntfile.coffee
+++ b/Gruntfile.coffee
@@ -19,8 +19,8 @@ module.exports = (grunt) ->
default: ['Gruntfile.coffee', 'src/**/*.coffee']
clean:
- default: "lib"
- test: "test"
+ default: "lib/*"
+ test: "test/*"
coffee:
options:
@@ -54,7 +54,7 @@ module.exports = (grunt) ->
# watching for changes
watch:
default:
- files: ["src/coffee/*.coffee"]
+ files: ["src/coffee/*.coffee", "src/spec/*.spec.coffee"]
tasks: ["build"]
test:
files: ["src/**/*.coffee"]
diff --git a/README.md b/README.md
index dfb4bea7..4d208b9d 100644
--- a/README.md
+++ b/README.md
@@ -30,16 +30,20 @@ When using SFTP, you should not use the `--file` option, instead you need to pro
### CSV Format
-Column 1 will be used as `SKU` identifier, whereas column 2 will be used as `quantity`.
-An example:
+A simple example:
```
-sku,quantity
-foo,9
-bar,-1
-SKU-123,42
+sku,quantityOnStock,restockableInDays,supplyChannel,expectedDelivery
+foo,9,3,channel-key,2016-10-27T14:36:04.487Z
+bar,-1,3,channel-key,2016-10-27T14:36:04.487Z
+SKU-123,42,3,other-channel,2016-10-27T14:36:04.487Z
```
-> Please note that the header names are currently ignored.
+### Custom fields
+```
+sku,quantityOnStock,customType,customField.foo,customField.bar
+123,77,my-type,12,nac
+abc,-3,my-type,5,ho
+```
### XML Format
@@ -48,11 +52,11 @@ SKU-123,42
foo
- 7
+ 7
bar
- 1
+ 1
```
diff --git a/data/example.csv b/data/example.csv
index 2003a76c..645d4a6f 100644
--- a/data/example.csv
+++ b/data/example.csv
@@ -1,4 +1,4 @@
-sku,quantity
+sku,quantityOnStock
foo,9
bar,-1
-SKU-123,42
\ No newline at end of file
+SKU-123,42
diff --git a/data/example.xml b/data/example.xml
index e891e957..28f8d90a 100644
--- a/data/example.xml
+++ b/data/example.xml
@@ -9,4 +9,4 @@
1
2013-11-19T00:00:00
-
\ No newline at end of file
+
diff --git a/package.json b/package.json
index 53b60e16..90f0b71b 100644
--- a/package.json
+++ b/package.json
@@ -43,10 +43,10 @@
"dependencies": {
"bluebird": "2.9.33",
"bunyan-logentries": "0.1.0",
- "csv": "0.3.7",
+ "csv": "^1.1.0",
"debug": "2.2.0",
"optimist": "0.6.1",
- "sphere-node-sdk": "^1.14.0",
+ "sphere-node-sdk": "^1.16.0",
"sphere-node-utils": "0.7.0",
"tmp": "0.0.23",
"underscore": "1.8.3",
@@ -55,17 +55,18 @@
},
"devDependencies": {
"coveralls": "2.11.2",
- "grunt-cli": "0.1.13",
"grunt": "0.4.5",
"grunt-bump": "0.0.13",
- "grunt-coffeelint": "0.0.8",
+ "grunt-cli": "0.1.13",
+ "grunt-coffeelint": "0.0.16",
"grunt-contrib-clean": "0.6.0",
"grunt-contrib-coffee": "0.13.0",
"grunt-contrib-concat": "0.5.1",
"grunt-contrib-watch": "0.6.1",
"grunt-shell": "0.6.4",
"istanbul": "0.3.17",
- "jasmine-node": "1.14.5",
+ "jasmine-node": "^1.14.5",
+ "sinon": "^1.17.6",
"sphere-coffeelint": "sphereio/sphere-coffeelint#master"
},
"keywords": [
diff --git a/src/coffee/constants.coffee b/src/coffee/constants.coffee
new file mode 100644
index 00000000..ebfcd849
--- /dev/null
+++ b/src/coffee/constants.coffee
@@ -0,0 +1,28 @@
+constants =
+ HEADER_SKU: 'sku'
+ HEADER_QUANTITY: 'quantityOnStock'
+ DEPRECATED_HEADER_QUANTITY: 'quantity'
+ HEADER_RESTOCKABLE: 'restockableInDays'
+ HEADER_EXPECTED_DELIVERY: 'expectedDelivery'
+ HEADER_SUPPLY_CHANNEL: 'supplyChannel'
+ HEADER_CUSTOM_TYPE: 'customType'
+ HEADER_CUSTOM_SEPERATOR: '.'
+ HEADER_CUSTOM_REGEX: new RegExp /^customField\./
+
+ CHANNEL_KEY_FOR_XML_MAPPING: 'expectedStock'
+ CHANNEL_REF_NAME: 'supplyChannel'
+ CHANNEL_ROLES: ['InventorySupply', 'OrderExport', 'OrderImport']
+ LOG_PREFIX: "[SphereStockImport] "
+ CHANNEL_REFERENCE_TYPE: 'channel'
+
+
+ REGEX_PRICE: new RegExp /^(([A-Za-z]{2})-|)([A-Z]{3}) (-?\d+)(-?\|(\d+)|)( ([^#]*)|)(#(.*)|)$/
+ REGEX_MONEY: new RegExp /^([A-Z]{3}) (-?\d+)$/
+ REGEX_INTEGER: new RegExp /^-?\d+$/
+ REGEX_FLOAT: new RegExp /^-?\d+(\.\d+)?$/
+ REGEX_LANGUAGE: new RegExp /^([a-z]{2,3}(?:-[A-Z]{2,3}(?:-[a-zA-Z]{4})?)?)$/
+  REGEX_CUR: new RegExp /^(AED|AFN|ALL|AMD|ANG|AOA|ARS|AUD|AWG|AZN|BAM|BBD|BDT|BGN|BHD|BIF|BMD|BND|BOB|BRL|BSD|BTN|BWP|BYR|BZD|CAD|CDF|CHF|CLP|CNY|COP|CRC|CUC|CUP|CVE|CZK|DJF|DKK|DOP|DZD|EGP|ERN|ETB|EUR|FJD|FKP|GBP|GEL|GGP|GHS|GIP|GMD|GNF|GTQ|GYD|HKD|HNL|HRK|HTG|HUF|IDR|ILS|IMP|INR|IQD|IRR|ISK|JEP|JMD|JOD|JPY|KES|KGS|KHR|KMF|KPW|KRW|KWD|KYD|KZT|LAK|LBP|LKR|LRD|LSL|LYD|MAD|MDL|MGA|MKD|MMK|MNT|MOP|MRO|MUR|MVR|MWK|MXN|MYR|MZN|NAD|NGN|NIO|NOK|NPR|NZD|OMR|PAB|PEN|PGK|PHP|PKR|PLN|PYG|QAR|RON|RSD|RUB|RWF|SAR|SBD|SCR|SDG|SEK|SGD|SHP|SLL|SOS|SPL|SRD|STD|SVC|SYP|SZL|THB|TJS|TMT|TND|TOP|TRY|TTD|TVD|TWD|TZS|UAH|UGX|USD|UYU|UZS|VEF|VND|VUV|WST|XAF|XCD|XDR|XOF|XPF|YER|ZAR|ZMW|ZWD)$/
+
+
+for name, value of constants
+ exports[name] = value
diff --git a/src/coffee/elasticio.coffee b/src/coffee/elasticio.coffee
index 163a80e9..bd628a94 100644
--- a/src/coffee/elasticio.coffee
+++ b/src/coffee/elasticio.coffee
@@ -28,7 +28,7 @@ exports.process = (msg, cfg, next, snapshot) ->
project_key: cfg.sphereProjectKey
timeout: 60000
user_agent: "#{package_json.name} - elasticio - #{package_json.version}",
- csvHeaders: 'sku, quantity'
+ csvHeaders: 'sku, quantityOnStock'
csvDelimiter: ','
stockimport = new StockImport logger, opts
diff --git a/src/coffee/mappings.coffee b/src/coffee/mappings.coffee
new file mode 100644
index 00000000..4b2248b1
--- /dev/null
+++ b/src/coffee/mappings.coffee
@@ -0,0 +1,100 @@
+_ = require 'underscore'
+_.mixin require('underscore-mixins')
+csv = require 'csv'
+CONS = require './constants'
+
+class CustomFieldMappings
+
+ constructor: (options = {}) ->
+ @errors = []
+
+ mapFieldTypes: ({fieldDefinitions, typeDefinitionKey, rowIndex, key, value, langHeader}) ->
+ result = undefined
+ _.each fieldDefinitions, (fieldDefinition) =>
+ if fieldDefinition.name is key
+ switch fieldDefinition.type.name
+ when 'Number' then result = @mapNumber value,typeDefinitionKey,rowIndex
+ when 'Boolean' then result = @mapBoolean value,typeDefinitionKey,rowIndex
+ when 'Money' then result = @mapMoney value,typeDefinitionKey,rowIndex
+ when 'LocalizedString' then result = @mapLocalizedString value, typeDefinitionKey, rowIndex,langHeader
+ when 'Set' then result = @mapSet value,typeDefinitionKey,rowIndex,fieldDefinition.type.elementType
+ else result = value
+ result
+
+ isValidValue: (rawValue) ->
+ return _.isString(rawValue) and rawValue.length > 0
+
+ mapNumber: (rawNumber, typeDefinitionKey, rowIndex, regEx = CONS.REGEX_INTEGER) ->
+ return unless @isValidValue(rawNumber)
+ matchedNumber = regEx.exec rawNumber
+ unless matchedNumber
+ @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] The number '#{rawNumber}' isn't valid!"
+ return
+ parseInt matchedNumber[0],10
+ ###
+ custom,customField.name.de,customField.name.en
+ my-type,Hajo,Abi
+ //- {
+ custom: {
+ name: {
+ de: 'Hajo',
+ en: 'Abi'
+ }
+ }
+ }
+ ###
+ mapLocalizedString: (value, typeDefinitionKey, rowIndex, langHeader, regEx = CONS.REGEX_LANGUAGE) ->
+ if !regEx.test langHeader
+      @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] localisedString header '#{langHeader}' format is not valid!"
+ return
+ else
+ "#{langHeader}": value
+
+ mapSet: (values, typeDefinitionKey, rowIndex, elementType) ->
+ result = undefined
+ values = values.split(',')
+ result = _.map values, (value) =>
+ switch elementType.name
+ when 'Number' then @mapNumber value,typeDefinitionKey,rowIndex
+ when 'Boolean' then @mapBoolean value,typeDefinitionKey,rowIndex
+ when 'Money' then @mapMoney value,typeDefinitionKey,rowIndex
+ when 'LocalizedString' then @mapLocalizedString value, typeDefinitionKey, rowIndex
+ else value
+ _.reject(result, _.isUndefined)
+
+ mapBoolean: (rawBoolean, typeDefinitionKey, rowIndex) ->
+ result = undefined
+ if _.isUndefined(rawBoolean) or (_.isString(rawBoolean) and _.isEmpty(rawBoolean))
+ return
+ errorMsg = "[row #{rowIndex}:#{typeDefinitionKey}] The value '#{rawBoolean}' isn't a valid boolean!"
+ try
+ b = JSON.parse(rawBoolean.toLowerCase())
+ if not _.isBoolean b
+        @errors.push errorMsg
+ return
+ b
+ catch
+ @errors.push errorMsg
+ return
+
+
+
+ # EUR 300
+ # USD 999
+ mapMoney: (rawMoney, typeDefinitionKey, rowIndex) ->
+ return unless @isValidValue(rawMoney)
+ matchedMoney = CONS.REGEX_MONEY.exec rawMoney
+ unless matchedMoney
+ @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] Can not parse money '#{rawMoney}'!"
+ return
+
+ validCurr = CONS.REGEX_CUR.exec matchedMoney[1]
+ unless validCurr
+ @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] Parsed currency is not valid '#{rawMoney}'!"
+ return
+
+ money =
+ currencyCode: matchedMoney[1].toUpperCase()
+ centAmount: parseInt matchedMoney[2],10
+
+module.exports = CustomFieldMappings
diff --git a/src/coffee/run.coffee b/src/coffee/run.coffee
index f42e3179..85c655ce 100644
--- a/src/coffee/run.coffee
+++ b/src/coffee/run.coffee
@@ -19,7 +19,6 @@ argv = require('optimist')
.describe('sphereAuthHost', 'SPHERE.IO OAuth host to connect to')
.describe('sphereAuthProtocol', 'SPHERE.IO OAuth protocol to connect to')
.describe('file', 'XML or CSV file containing inventory information to import')
- .describe('csvHeaders', 'a list of column names to use as mapping, comma separated')
.describe('csvDelimiter', 'the delimiter type used in the csv')
.describe('sftpCredentials', 'the path to a JSON file where to read the credentials from')
.describe('sftpHost', 'the SFTP host (overwrite value in sftpCredentials JSON, if given)')
@@ -35,7 +34,6 @@ argv = require('optimist')
.describe('logDir', 'directory to store logs')
.describe('logSilent', 'use console to print messages')
.describe('timeout', 'Set timeout for requests')
- .default('csvHeaders', 'sku, quantity')
.default('csvDelimiter', ',')
.default('logLevel', 'info')
.default('logDir', '.')
@@ -102,7 +100,6 @@ ensureCredentials(argv)
options = _.extend credentials,
timeout: argv.timeout
user_agent: "#{package_json.name} - #{package_json.version}"
- csvHeaders: argv.csvHeaders
csvDelimiter: argv.csvDelimiter
options.host = argv.sphereHost if argv.sphereHost
@@ -148,7 +145,7 @@ ensureCredentials(argv)
# unsafeCleanup: recursively removes the created temporary directory, even when it's not empty
tmp.dirAsync {unsafeCleanup: true}
- .then (tmpPath) =>
+ .then (tmpPath) ->
logger.debug "Tmp folder created at #{tmpPath}"
sftpHelper.download(tmpPath)
.then (files) ->
@@ -203,4 +200,4 @@ ensureCredentials(argv)
.catch (err) =>
logger.error err, "Problems on getting client credentials from config files."
@exitCode = 1
-.done()
\ No newline at end of file
+.done()
diff --git a/src/coffee/stockimport.coffee b/src/coffee/stockimport.coffee
index d81488c5..20badac0 100644
--- a/src/coffee/stockimport.coffee
+++ b/src/coffee/stockimport.coffee
@@ -1,25 +1,24 @@
debug = require('debug')('sphere-stock-import')
_ = require 'underscore'
_.mixin require('underscore-mixins')
-Csv = require 'csv'
+csv = require 'csv'
Promise = require 'bluebird'
{ElasticIo} = require 'sphere-node-utils'
{SphereClient, InventorySync} = require 'sphere-node-sdk'
package_json = require '../package.json'
+CONS = require './constants'
+CustomFieldMappings = require './mappings'
xmlHelpers = require './xmlhelpers'
-CHANNEL_KEY_FOR_XML_MAPPING = 'expectedStock'
-CHANNEL_REF_NAME = 'supplyChannel'
-CHANNEL_ROLES = ['InventorySupply', 'OrderExport', 'OrderImport']
-LOG_PREFIX = "[SphereStockImport] "
-
class StockImport
constructor: (@logger, options = {}) ->
+ options = _.defaults options, {user_agent: 'sphere-stock-import'}
@sync = new InventorySync
@client = new SphereClient options
@csvHeaders = options.csvHeaders
@csvDelimiter = options.csvDelimiter
+ @customFieldMappings = new CustomFieldMappings()
@_resetSummary()
_resetSummary: ->
@@ -60,7 +59,7 @@ class StockImport
else if _.size(msg.body) > 0
_ensureChannel = =>
if msg.body.CHANNEL_KEY?
- @client.channels.ensure(msg.body.CHANNEL_KEY, CHANNEL_ROLES)
+ @client.channels.ensure(msg.body.CHANNEL_KEY, CONS.CHANNEL_ROLES)
.then (result) ->
debug 'Channel ensured, about to create or update: %j', result
Promise.resolve(result.body.id)
@@ -90,7 +89,7 @@ class StockImport
ElasticIo.returnFailure err, next
.done()
else
- ElasticIo.returnFailure "#{LOG_PREFIX}No data found in elastic.io msg.", next
+ ElasticIo.returnFailure "#{CONS.LOG_PREFIX}No data found in elastic.io msg.", next
run: (fileContent, mode, next) ->
@_resetSummary()
@@ -99,7 +98,7 @@ class StockImport
else if mode is 'CSV'
@performCSV fileContent, next
else
- Promise.reject "#{LOG_PREFIX}Unknown import mode '#{mode}'!"
+ Promise.reject "#{CONS.LOG_PREFIX}Unknown import mode '#{mode}'!"
summaryReport: (filename) ->
if @_summary.created is 0 and @_summary.updated is 0
@@ -118,9 +117,9 @@ class StockImport
new Promise (resolve, reject) =>
xmlHelpers.xmlTransform xmlHelpers.xmlFix(fileContent), (err, xml) =>
if err?
- reject "#{LOG_PREFIX}Error on parsing XML: #{err}"
+ reject "#{CONS.LOG_PREFIX}Error on parsing XML: #{err}"
else
- @client.channels.ensure(CHANNEL_KEY_FOR_XML_MAPPING, CHANNEL_ROLES)
+ @client.channels.ensure(CONS.CHANNEL_KEY_FOR_XML_MAPPING, CONS.CHANNEL_ROLES)
.then (result) =>
stocks = @_mapStockFromXML xml.root, result.body.id
@_perform stocks, next
@@ -130,36 +129,22 @@ class StockImport
performCSV: (fileContent, next) ->
new Promise (resolve, reject) =>
- Csv().from.string(fileContent, {delimiter: @csvDelimiter, trim: true})
- .to.array (data, count) =>
+ csv.parse fileContent, {delimiter: @csvDelimiter, trim: true}, (error, data) =>
+ if (error)
+        return reject "#{CONS.LOG_PREFIX}Problem in parsing CSV: #{error}"
+
headers = data[0]
- @_getHeaderIndexes headers, @csvHeaders
- .then (mappedHeaderIndexes) =>
- stocks = @_mapStockFromCSV _.tail(data), mappedHeaderIndexes[0], mappedHeaderIndexes[1]
- debug "Stock mapped from csv for headers #{mappedHeaderIndexes}: %j", stocks
+ @_mapStockFromCSV(_.rest(data), headers).then (stocks) =>
+ debug "Stock mapped from csv for headers #{headers}: %j", stocks
- # TODO: ensure channel ??
@_perform stocks, next
- .then (result) -> resolve result
+ .then (result) -> resolve result
.catch (err) -> reject err
.done()
- .on 'error', (error) ->
- reject "#{LOG_PREFIX}Problem in parsing CSV: #{error}"
performStream: (chunk, cb) ->
@_processBatches(chunk).then -> cb()
- _getHeaderIndexes: (headers, csvHeaders) ->
- Promise.all _.map csvHeaders.split(','), (h) =>
- cleanHeader = h.trim()
- mappedHeader = _.find headers, (header) -> header.toLowerCase() is cleanHeader.toLowerCase()
- if mappedHeader
- headerIndex = _.indexOf headers, mappedHeader
- debug "Found index #{headerIndex} for header #{cleanHeader}: %j", headers
- Promise.resolve(headerIndex)
- else
- Promise.reject "Can't find header '#{cleanHeader}' in '#{headers}'."
-
_mapStockFromXML: (xmljs, channelId) ->
stocks = []
if xmljs.row?
@@ -179,11 +164,99 @@ class StockImport
stocks.push d
stocks
- _mapStockFromCSV: (rows, skuIndex = 0, quantityIndex = 1) ->
- _.map rows, (row) =>
- sku = row[skuIndex].trim()
- quantity = row[quantityIndex]?.trim()
- @_createInventoryEntry sku, quantity
+ _mapStockFromCSV: (rows, mappedHeaderIndexes) ->
+ return new Promise (resolve, reject) =>
+ rowIndex = 0 # very weird that csv does not support this internally
+ csv.transform(rows,(row, cb) =>
+ rowIndex++
+ _data = {}
+
+ Promise.each(row, (cell, index) =>
+ headerName = mappedHeaderIndexes[index]
+
+ # Change deprecated header 'quantity' to 'quantityOnStock' for backward compatibility
+ if headerName == CONS.DEPRECATED_HEADER_QUANTITY
+ @logger.warn "The header name #{CONS.DEPRECATED_HEADER_QUANTITY} has been deprecated!"
+ @logger.warn "Please change #{CONS.DEPRECATED_HEADER_QUANTITY} to #{CONS.HEADER_QUANTITY}"
+ headerName = CONS.HEADER_QUANTITY
+
+ if CONS.HEADER_CUSTOM_REGEX.test headerName
+ customTypeKey = row[mappedHeaderIndexes.indexOf(CONS.HEADER_CUSTOM_TYPE)]
+
+ @_getCustomTypeDefinition(customTypeKey).then (response) =>
+ customTypeDefinition = response.body
+ @_mapCustomField(_data, cell, headerName, customTypeDefinition, rowIndex)
+
+ else
+ Promise.resolve(@_mapCellData(cell, headerName)).then (cellData) ->
+ _data[headerName] = cellData
+
+ ).then =>
+ if _.size(@customFieldMappings.errors) isnt 0
+ return cb @customFieldMappings.errors
+ cb null, _data
+ , (err, data) ->
+ if err
+ reject(err)
+ else
+ resolve(data)
+ )
+
+
+ _mapCellData: (data, headerName) ->
+ data = data?.trim()
+ switch on
+ when CONS.HEADER_QUANTITY is headerName then parseInt(data, 10) or 0
+ when CONS.HEADER_RESTOCKABLE is headerName then parseInt(data, 10)
+ when CONS.HEADER_SUPPLY_CHANNEL is headerName then @_mapChannelKeyToReference data
+ else data
+
+ _mapCustomField: (data, cell, headerName, customTypeDefinition, rowIndex) ->
+ fieldName = headerName.split(CONS.HEADER_CUSTOM_SEPERATOR)[1]
+ lang = headerName.split(CONS.HEADER_CUSTOM_SEPERATOR)[2]
+
+ # set data.custom once per row with the type defined
+ if !data.custom
+ data.custom = {
+ "type": {
+ "id": customTypeDefinition.id
+ },
+ "fields": {}
+ }
+ # Set localized object if present
+ if lang
+ data.custom.fields[fieldName] =
+ _.defaults (data.custom.fields[fieldName] || {}),
+ @customFieldMappings.mapFieldTypes({
+ fieldDefinitions: customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: customTypeDefinition.key,
+ rowIndex: rowIndex,
+ key: fieldName,
+ value: cell,
+ langHeader: lang,
+ })
+ else
+ data.custom.fields[fieldName] = @customFieldMappings.mapFieldTypes({
+ fieldDefinitions: customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: customTypeDefinition.key,
+ rowIndex: rowIndex,
+ key: fieldName,
+ value: cell,
+ })
+
+ # Memoize to prevent unneeded API calls
+ _getCustomTypeDefinition: _.memoize (customTypeKey) ->
+ @client.types.byKey(customTypeKey).fetch()
+
+ # Memoize to prevent unneeded API calls
+ _mapChannelKeyToReference: _.memoize (key) ->
+ @client.channels.where("key=\"#{key}\"").fetch()
+ .then (response) =>
+ if (response.body.results[0] && response.body.results[0].id)
+ return typeId: CONS.CHANNEL_REFERENCE_TYPE, id: response.body.results[0].id
+
+ @customFieldMappings.errors.push("Couldn\'t find channel with #{key} as key.")
+    .catch (err) => @customFieldMappings.errors.push(err)
_createInventoryEntry: (sku, quantity, expectedDelivery, channelId) ->
entry =
@@ -191,7 +264,7 @@ class StockImport
quantityOnStock: parseInt(quantity, 10) or 0 # avoid NaN
entry.expectedDelivery = expectedDelivery if expectedDelivery?
if channelId?
- entry[CHANNEL_REF_NAME] =
+ entry[CONS.CHANNEL_REF_NAME] =
typeId: 'channel'
id: channelId
entry
@@ -206,10 +279,10 @@ class StockImport
QUANTITY: entry.quantityOnStock
if entry.expectedDelivery?
msg.body.EXPECTED_DELIVERY = entry.expectedDelivery
- if entry[CHANNEL_REF_NAME]?
- msg.body.CHANNEL_ID = entry[CHANNEL_REF_NAME].id
+ if entry[CONS.CHANNEL_REF_NAME]?
+ msg.body.CHANNEL_ID = entry[CONS.CHANNEL_REF_NAME].id
ElasticIo.returnSuccess msg, next
- Promise.resolve "#{LOG_PREFIX}elastic.io messages sent."
+ Promise.resolve "#{CONS.LOG_PREFIX}elastic.io messages sent."
else
@_processBatches(stocks)
@@ -254,10 +327,10 @@ class StockImport
# check channel
# - if they have the same channel, it's the same entry
# - if they have different channels or one of them has no channel, it's not
- if _.has(entry, CHANNEL_REF_NAME) and _.has(existingEntry, CHANNEL_REF_NAME)
- entry[CHANNEL_REF_NAME].id is existingEntry[CHANNEL_REF_NAME].id
+ if _.has(entry, CONS.CHANNEL_REF_NAME) and _.has(existingEntry, CONS.CHANNEL_REF_NAME)
+ entry[CONS.CHANNEL_REF_NAME].id is existingEntry[CONS.CHANNEL_REF_NAME].id
else
- if _.has(entry, CHANNEL_REF_NAME) or _.has(existingEntry, CHANNEL_REF_NAME)
+ if _.has(entry, CONS.CHANNEL_REF_NAME) or _.has(existingEntry, CONS.CHANNEL_REF_NAME)
false # one of them has a channel, the other not
else
true # no channel, but same sku
diff --git a/src/spec/elasticio.spec.coffee b/src/spec/elasticio.spec.coffee
index bd63b771..f9f4dbe6 100644
--- a/src/spec/elasticio.spec.coffee
+++ b/src/spec/elasticio.spec.coffee
@@ -105,7 +105,7 @@ describe 'elasticio integration', ->
sphereProjectKey: Config.config.project_key
csv =
'''
- sku,quantity
+ sku,quantityOnStock
c1,1
c2,2
c3,3
diff --git a/src/spec/helper-customTypePayload.spec.coffee b/src/spec/helper-customTypePayload.spec.coffee
new file mode 100644
index 00000000..7b049919
--- /dev/null
+++ b/src/spec/helper-customTypePayload.spec.coffee
@@ -0,0 +1,67 @@
+baseObj = {
+ "key": "my-type",
+ "name": { "en": "customized fields" },
+ "description": { "en": "customized fields definition" },
+ "resourceTypeIds": ["inventory-entry"],
+ "fieldDefinitions": [
+ {
+ "name": "description",
+ "type": { "name": "String" },
+ "required": false,
+ "label": { "en": "size" },
+ "inputHint": "SingleLine"
+ },
+ {
+ "name": "color",
+ "type": {"name": "String"},
+ "required": false,
+ "label": { "en": "color" },
+ "inputHint": "SingleLine"
+ },
+ {
+ "name": "quantityFactor",
+ "type": {"name": "Number"},
+ "required": false,
+ "label": { "en": "quantityFactor" },
+ "inputHint": "SingleLine"
+ },
+ {
+ "name": "price",
+ "type": {"name": "Money"},
+ "required": false,
+ "label": { "en": "price" },
+ "inputHint": "SingleLine"
+ },
+ {
+ "name": "localizedString",
+ "type": { "name": "LocalizedString" },
+ "required": false,
+ "label": { "en": "size" },
+ "inputHint": "SingleLine"
+ },
+ {
+ "name": "name",
+ "type": { "name": "LocalizedString" },
+ "required": false,
+ "label": { "en": "name" },
+ "inputHint": "SingleLine"
+ }
+ ]
+}
+
+exports.customTypePayload1 = ->
+ JSON.parse(JSON.stringify(baseObj))
+exports.customTypePayload2 = ->
+ data = JSON.parse(JSON.stringify(baseObj))
+ data.key = "my-type1"
+ data
+exports.customTypePayload3 = ->
+ data = JSON.parse(JSON.stringify(baseObj))
+ data.key = "my-type2"
+ data.fieldDefinitions[1] =
+ "name": "another",
+ "type": { "name": "String" },
+ "required": false,
+ "label": { "en": "size" },
+ "inputHint": "SingleLine"
+ data
diff --git a/src/spec/integration.spec.coffee b/src/spec/integration.spec.coffee
index 61bbb7fe..a560d516 100644
--- a/src/spec/integration.spec.coffee
+++ b/src/spec/integration.spec.coffee
@@ -5,6 +5,7 @@ Promise = require 'bluebird'
package_json = require '../package.json'
Config = require '../config'
StockImport = require '../lib/stockimport'
+{customTypePayload1, customTypePayload2, customTypePayload3} = require './helper-customTypePayload.spec'
cleanup = (logger, client) ->
logger.debug 'Deleting old inventory entries...'
@@ -13,7 +14,21 @@ cleanup = (logger, client) ->
Promise.all _.map result.body.results, (e) ->
client.inventoryEntries.byId(e.id).delete(e.version)
.then (results) ->
- logger.debug "#{_.size results} deleted."
+ logger.debug "Inventory #{_.size results} deleted."
+ logger.debug 'Deleting old types entries...'
+ client.types.all().fetch()
+ .then (result) ->
+ Promise.all _.map result.body.results, (e) ->
+ client.types.byId(e.id).delete(e.version)
+ .then (results) ->
+ logger.debug "Types #{_.size results} deleted."
+ logger.debug 'Deleting old channels entries...'
+ client.channels.all().fetch()
+ .then (result) ->
+ Promise.all _.map result.body.results, (e) ->
+ client.channels.byId(e.id).delete(e.version)
+ .then (results) ->
+ logger.debug "Channels #{_.size results} deleted."
Promise.resolve()
describe 'integration test', ->
@@ -29,14 +44,15 @@ describe 'integration test', ->
]
@stockimport = new StockImport @logger,
config: Config.config
- csvHeaders: 'stock,number'
+ csvHeaders: 'sku,quantityOnStock'
csvDelimiter: ','
@client = @stockimport.client
@logger.info 'About to setup...'
cleanup(@logger, @client)
- .then -> done()
+ .then =>
+ done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
@@ -293,11 +309,12 @@ describe 'integration test', ->
it 'CSV - one new stock', (done) ->
raw =
'''
- stock,number
+ sku,quantityOnStock
abcd,0
'''
@stockimport.run(raw, 'CSV')
- .then => @stockimport.summaryReport()
+ .then =>
+ @stockimport.summaryReport()
.then (message) =>
expect(message).toBe 'Summary: there were 1 imported stocks (1 were new and 0 were updates)'
@client.inventoryEntries.fetch()
@@ -319,3 +336,167 @@ describe 'integration test', ->
done()
.catch (err) -> done(_.prettify err)
, 10000 # 10sec
+
+ describe 'CSV file', =>
+ testChannel = undefined
+ testChannel2 = undefined
+
+ beforeEach (done) ->
+
+ # Clear memoize cache
+ @stockimport._getCustomTypeDefinition.cache = {}
+
+ @logger.info 'About to setup...'
+ cleanup(@logger, @client)
+ .then =>
+ @client.types.create(customTypePayload1())
+ .then =>
+ @client.types.create(customTypePayload2())
+ .then =>
+ @client.types.create(customTypePayload3())
+ .then (res) =>
+ @client.channels.create(key: 'testchannel').then (result) ->
+ testChannel = result.body
+ .then (res) =>
+ @client.channels.create(key: 'testchannel2').then (result) ->
+ testChannel2 = result.body
+ done()
+ .catch (err) -> done(_.prettify err)
+ , 10000 # 10sec
+
+ afterEach (done) ->
+ @logger.info 'About to cleanup...'
+ cleanup(@logger, @client)
+ .then -> done()
+ .catch (err) -> done(_.prettify err)
+ , 10000 # 10sec
+
+ it 'CSV - one new stock', (done) ->
+ raw =
+ """
+ sku,quantityOnStock,restockableInDays,expectedDelivery,supplyChannel,customType,customField.quantityFactor,customField.color,customField.localizedString.de,customField.localizedString.en
+ another2,77,12,2001-09-11T14:00:00.000Z,#{testChannel.key},my-type,12,nac,Schneidder,Abi
+ """
+ @stockimport.run(raw, 'CSV')
+ .then =>
+ @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: there were 1 imported stocks (1 were new and 0 were updates)'
+ @client.inventoryEntries.fetch()
+ .then (result) =>
+ stocks = result.body.results
+ expect(_.size stocks).toBe 1
+ expect(stocks[0].sku).toBe 'another2'
+ expect(stocks[0].quantityOnStock).toBe 77
+ @stockimport.run(raw, 'CSV')
+ .then => @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: nothing to do, everything is fine'
+ @client.inventoryEntries.fetch()
+ .then (result) ->
+ stocks = result.body.results
+ expect(_.size stocks).toBe 1
+ expect(stocks[0].sku).toBe 'another2'
+ expect(stocks[0].quantityOnStock).toBe 77
+ done()
+ .catch (err) ->
+ done(_.prettify err)
+ , 10000 # 10sec
+
+ it 'CSV - should ignore empty fields in customFields', (done) ->
+ raw =
+ """
+ sku,quantityOnStock,restockableInDays,expectedDelivery,customType,customField.quantityFactor,customField.color,customField.another,customField.localizedString.de,customField.localizedString.en
+ another3,77,12,2001-09-11T14:00:00.000Z,my-type,12,nac,,Schneidder,Abi
+ another10,77,12,2001-09-11T14:00:00.000Z,my-type2,12,,okay,Schneidder,Abi
+ """
+
+ @stockimport.run(raw, 'CSV')
+ .then =>
+ @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: there were 2 imported stocks (2 were new and 0 were updates)'
+ @client.inventoryEntries.fetch()
+ .then (result) =>
+ stocks = result.body.results
+ expect(_.size stocks).toBe 2
+ stock1 = _.find stocks, (stock) -> stock.sku is 'another3'
+ stock2 = _.find stocks, (stock) -> stock.sku is 'another10'
+ expect(stock1).toBeDefined()
+ expect(stock1.quantityOnStock).toBe 77
+ expect(stock1.custom.fields.another).not.toBeDefined()
+ expect(stock2.custom.fields.another).toBeDefined()
+ expect(stock2.custom.fields.color).not.toBeDefined()
+ expect(stock2.custom.fields.another).toBe 'okay'
+ @stockimport.run(raw, 'CSV')
+ .then => @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: nothing to do, everything is fine'
+ @client.inventoryEntries.fetch()
+ .then (result) ->
+ stocks = result.body.results
+ expect(_.size stocks).toBe 2
+ stock1 = _.find stocks, (stock) -> stock.sku is 'another3'
+ expect(stock1.sku).toBe 'another3'
+ expect(stock1.quantityOnStock).toBe 77
+ done()
+ .catch (err) ->
+ console.log JSON.stringify(err, null,2)
+ done(_.prettify err)
+ , 10000 # 10sec
+
+ it 'CSV - update stock', (done) ->
+ raw =
+ """
+ sku,quantityOnStock,restockableInDays,expectedDelivery,supplyChannel,customType,customField.quantityFactor,customField.color,customField.localizedString.de,customField.localizedString.en
+ another2,77,12,2001-09-11T14:00:00.000Z,#{testChannel2.key},my-type1,12,nac,Schneidder,Abi
+ """
+ raw2 =
+ """
+ sku,quantityOnStock,restockableInDays,expectedDelivery,supplyChannel,customType,customField.quantityFactor,customField.color,customField.localizedString.de,customField.localizedString.en
+ another2,72,10,2001-08-11T14:00:00.000Z,#{testChannel2.key},my-type1,12,blue,Schneidder,Josh
+ """
+ @stockimport.run(raw, 'CSV')
+ .then =>
+ @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: there were 1 imported stocks (1 were new and 0 were updates)'
+ @client.inventoryEntries.fetch()
+ .then (result) =>
+ stocks = result.body.results
+ expect(_.size stocks).toBe 1
+ expect(stocks[0].sku).toBe 'another2'
+ expect(stocks[0].quantityOnStock).toBe 77
+ @stockimport.run(raw2, 'CSV')
+ .then => @stockimport.summaryReport()
+ .then (message) =>
+ expect(message).toBe 'Summary: there were 1 imported stocks (0 were new and 1 were updates)'
+ @client.inventoryEntries.fetch()
+ .then (result) ->
+ stocks = result.body.results
+ expect(_.size stocks).toBe 1
+ expect(stocks[0].sku).toBe 'another2'
+ expect(stocks[0].quantityOnStock).toBe 72
+ expect(stocks[0].custom.fields.localizedString.en).toBe 'Josh'
+ expect(stocks[0].custom.fields.color).toBe 'blue'
+ done()
+ .catch (err) ->
+ done(_.prettify err)
+ , 10000 # 10sec
+
+ it 'CSV - API should return error if required header is missing', (done) ->
+ raw =
+ """
+ sku,invalidheader,restockableInDays,expectedDelivery,supplyChannel,customType,customField.quantityFactor,customField.color,customField.localizedString.de,customField.localizedString.en
+ another2,77,12,2001-09-11T14:00:00.000Z,#{testChannel2.key},my-type1,12,nac,Schneidder,Abi
+ """
+ @stockimport.run(raw, 'CSV')
+ .then (result)=>
+ expect(result).not.toBeDefined
+ .catch (err) ->
+ expect(err).toBeDefined()
+ expect(err.message).toBe 'Request body does not contain valid JSON.'
+ expect(err.body.errors.length).toBe 1
+ expect(err.body.errors[0].detailedErrorMessage).toBe 'quantityOnStock: Missing required value'
+ done()
+ , 10000 # 10sec
diff --git a/src/spec/mappings.spec.coffee b/src/spec/mappings.spec.coffee
new file mode 100644
index 00000000..8d01f043
--- /dev/null
+++ b/src/spec/mappings.spec.coffee
@@ -0,0 +1,243 @@
+
+Mappings = require '../lib/mappings'
+
+describe 'Mappings', ->
+ beforeEach ->
+ @map = new Mappings()
+ @customTypeDefinition =
+ key: 'my-category'
+ "fieldDefinitions": [
+ {
+ "name": "stringtype",
+ "type": {
+ "name": "String"
+ },
+ },
+ {
+ "name": "booleantype",
+ "type": {
+ "name": "Boolean"
+ },
+ },
+ {
+ "name": "money",
+ "type": {
+ "name": "Money"
+ },
+ },
+ {
+ "name": "numbertype",
+ "type": {
+ "name": "Number"
+ },
+ },
+ {
+ "name": "localizedstringtype",
+ "type": {
+ "name": "LocalizedString"
+ },
+ },
+ {
+ "name": "enumtype",
+ "type": {
+ "name": "Enum",
+ "values": [
+ {
+ "key": "en",
+ "label": "okay"
+ }
+ ]
+ },
+ },
+ {
+ "name": "localizedenumtype",
+ "type": {
+ "name": "LocalizedEnum",
+ "values": [
+ {
+ "key": "enwew",
+ "label": {
+ "de": "Hundefutter",
+ "en": "dog food"
+ }
+ }
+ ]
+ },
+ },
+ {
+ "name": "settype",
+ "type": {
+ "name": "Set",
+ "elementType": {
+ "name": "Number"
+ }
+ },
+ },
+ {
+ "name": "datetype",
+ "type": {
+ "name": "Date"
+ },
+ },
+ {
+ "name": "datetimetype",
+ "type": {
+ "name": "DateTime"
+ },
+ },
+ {
+ "name": "time",
+ "type": {
+ "name": "Time"
+ },
+ }
+ ]
+
+ it 'should initialize', ->
+ expect(@map).toBeDefined()
+ expect(@map.mapNumber).toBeDefined()
+ expect(@map.mapLocalizedString).toBeDefined()
+ expect(@map.mapSet).toBeDefined()
+ expect(@map.mapMoney).toBeDefined()
+ expect(@map.mapBoolean).toBeDefined()
+ expect(@map.mapFieldTypes).toBeDefined()
+
+ describe '::mapNumber', ->
+ it 'should convert strings to integer', ->
+ result = @map.mapNumber '3',@customTypeDefinition.key,2
+
+ expect(typeof result).toBe 'number'
+ it 'should return undefined if input is not a string', ->
+ result = @map.mapNumber 3,@customTypeDefinition.key,2
+
+ expect(@map.errors.length).toBe 0
+ expect(result).not.toBeDefined()
+
+ it 'should return error if input does not contain only numbers', ->
+ result = @map.mapNumber '123error',@customTypeDefinition.key,2
+ expect(@map.errors.length).toBe 1
+ expect(@map.errors[0]).toBe "[row 2:my-category] The number '123error' isn't valid!"
+ expect(result).not.toBeDefined()
+
+ describe '::mapLocalizedString', ->
+ it 'should convert to localizedString', ->
+ result = @map.mapLocalizedString 'foo',@customTypeDefinition.key,2,'de'
+
+ expect(result).toEqual {de: 'foo'}
+
+ it 'should add error if value is not valid', ->
+ result = @map.mapLocalizedString 'blue',@customTypeDefinition.key,2,'invalid'
+ expect(result).not.toBeDefined()
+ expect(@map.errors[0]).toBe "[row 2:my-category] localisedString header 'invalid' format is not valid!"
+
+ describe '::mapBoolean', ->
+ it 'should convert to boolean', ->
+ result = @map.mapBoolean 'true',@customTypeDefinition.key,2
+ expect(result).toBe true
+
+ it 'should add error if value is not a valid boolean', ->
+ result = @map.mapBoolean 'invalid',@customTypeDefinition.key,2
+ expect(result).not.toBeDefined()
+ expect(@map.errors[0]).toBe "[row 2:my-category] The value \'invalid\' isn\'t a valid boolean!"
+
+ describe '::mapFieldTypes', ->
+ it 'should map String type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'stringtype',
+ value: 'okay',
+ })
+ expect(result).toBe 'okay'
+
+ it 'should map Number type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'numbertype',
+ value: '123',
+ })
+ expect(@map.errors).toEqual []
+ expect(result).toBe 123
+
+ it 'should map Boolean type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'booleantype',
+ value: 'true',
+ })
+ expect(result).toBe true
+ expect(@map.errors).toEqual []
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'booleantype',
+ value: 'false',
+ })
+ expect(result).toBe false
+ expect(@map.errors).toEqual []
+
+ it 'should map Enum type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'enumtype',
+ value: 'la',
+ })
+ expect(result).toBe 'la'
+ expect(@map.errors).toEqual []
+
+ it 'should map localizedenumtype type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'localizedstringtype',
+ value: 'la',
+ langHeader: 'de',
+ })
+ expect(result).toEqual de: 'la'
+ expect(@map.errors).toEqual []
+
+ it 'should map money type', ->
+ result = @map.mapFieldTypes({
+ fieldDefinitions: @customTypeDefinition.fieldDefinitions,
+ typeDefinitionKey: @customTypeDefinition.key,
+ rowIndex: 2,
+ key: 'money',
+ value: 'EUR 1400',
+ })
+ expect(result).toEqual currencyCode: 'EUR', centAmount: 1400
+ expect(@map.errors).toEqual []
+
+ describe '::mapSet', ->
+ it 'should convert to set', ->
+ elementType = name: 'Number'
+ result = @map.mapSet '1,2,3,4',@customTypeDefinition.key,2,elementType
+ expect(result).toEqual [1,2,3,4]
+
+ it 'should add error if value is invalid and remove invalid values', ->
+ elementType = name: 'Number'
+ result = @map.mapSet '1,2,"3",4',@customTypeDefinition.key,2,elementType
+ expect(result).toEqual [1,2,4]
+ expect(@map.errors[0]).toBe "[row 2:my-category] The number '\"3\"' isn't valid!"
+
+ describe '::mapMoney', ->
+ it 'should convert to Money object', ->
+ result = @map.mapMoney 'EUR 140',@customTypeDefinition.key,2
+ expect(result).toEqual {currencyCode: 'EUR', centAmount: 140}
+
+ it 'should add error if value is not a valid money format', ->
+ result = @map.mapMoney 'invalid',@customTypeDefinition.key,2
+ expect(result).not.toBeDefined()
+ expect(@map.errors[0]).toBe "[row 2:my-category] Can not parse money 'invalid'!"
+ it 'should add error if currency in money is not a valid currency', ->
+ result = @map.mapMoney 'ABI 140',@customTypeDefinition.key,2
+ expect(result).not.toBeDefined()
+ expect(@map.errors[0]).toBe "[row 2:my-category] Parsed currency is not valid 'ABI 140'!"
diff --git a/src/spec/stockimport.spec.coffee b/src/spec/stockimport.spec.coffee
index 408bef18..ee775d86 100644
--- a/src/spec/stockimport.spec.coffee
+++ b/src/spec/stockimport.spec.coffee
@@ -1,14 +1,23 @@
_ = require 'underscore'
_.mixin require('underscore-mixins')
Promise = require 'bluebird'
-Csv = require 'csv'
+csv = require 'csv'
+sinon = require 'sinon'
{ExtendedLogger} = require 'sphere-node-utils'
package_json = require '../package.json'
Config = require '../config'
xmlHelpers = require '../lib/xmlhelpers.js'
StockImport = require '../lib/stockimport'
+{customTypePayload1} = require './helper-customTypePayload.spec'
+
describe 'StockImport', ->
+ cleanup = (endpoint) ->
+ endpoint.all().fetch()
+ .then (result) ->
+ Promise.all _.map result.body.results, (e) ->
+ endpoint.byId(e.id).delete(e.version)
+
beforeEach ->
logger = new ExtendedLogger
logConfig:
@@ -18,7 +27,6 @@ describe 'StockImport', ->
]
@import = new StockImport logger,
config: Config.config
- csvHeaders: 'id, amount'
csvDelimiter: ','
it 'should initialize', ->
@@ -26,6 +34,8 @@ describe 'StockImport', ->
expect(@import.client).toBeDefined()
expect(@import.client.constructor.name).toBe 'SphereClient'
expect(@import.sync).toBeDefined()
+ expect(@import.client?._rest?._options?.headers?['User-Agent'])
+ .toBe('sphere-stock-import')
expect(@import.sync.constructor.name).toBe 'InventorySync'
@@ -205,79 +215,240 @@ describe 'StockImport', ->
expect(s.supplyChannel.id).toBe 'myChannelId'
done()
-
- describe '::_getHeaderIndexes', ->
- it 'should reject if no sku header found', (done) ->
- @import._getHeaderIndexes ['bla', 'foo', 'quantity', 'price'], 'sku, q'
- .then (msg) -> done msg
- .catch (err) ->
- expect(err).toBe "Can't find header 'sku' in 'bla,foo,quantity,price'."
+ describe '::_mapChannelKeyToReference', ->
+ testChannel = undefined
+
+ beforeEach (done) ->
+ channelPayload = {
+ "key": "mah-channel"
+ }
+
+ cleanup(@import.client.channels)
+ .then =>
+ @import.client.channels.create(channelPayload)
+ .then((result) ->
+ testChannel = result
+ done()
+ )
+ .catch(done)
+
+ it 'should fetch reference from key', (done) ->
+ @import._mapChannelKeyToReference testChannel.body.key
+ .then (result) ->
+ expect(result).toEqual {typeId: 'channel', id: testChannel.body.id}
+ done()
+
+ describe '::_getCustomTypeDefinition', ->
+ types = undefined
+ customType = undefined
+
+ beforeEach (done) ->
+ types = @import.client.types
+ cleanup(@import.client.inventoryEntries).then ->
+ cleanup(types).then ->
+ customTypePayload = customTypePayload1()
+ types.create(customTypePayload).then (result) ->
+ customType = result.body
+ done()
+
+ afterEach (done) ->
+ cleanup(@import.client.types)
+ .then ->
+ done()
+
+ it 'should fetch customTypeDefinition', (done) ->
+
+ @import._getCustomTypeDefinition(customType.key).then (data) ->
+ result = data.body
+ expect(result).toBeDefined()
+ expect(result.key).toBe(customType.key)
+ expect(result.fieldDefinitions).toBeDefined()
done()
- it 'should reject if no quantity header found', (done) ->
- @import._getHeaderIndexes ['sku', 'price', 'quality'], 'sku, quantity'
- .catch (err) ->
- expect(err).toBe "Can't find header 'quantity' in 'sku,price,quality'."
+ it 'should memoize customTypeDefinition result', (done) ->
+ stub = sinon.stub(@import.client.types, 'byKey')
+ .onFirstCall('first').returns(fetch: -> Promise.resolve('first call'))
+ .onSecondCall('second').returns(fetch: -> Promise.resolve('second call'))
+ Promise.all([
+ @import._getCustomTypeDefinition('first'),
+ @import._getCustomTypeDefinition('first'),
+ @import._getCustomTypeDefinition('second'),
+ @import._getCustomTypeDefinition('second'),
+ @import._getCustomTypeDefinition('first'),
+ ]).then (result) ->
+ expect(result.length).toBe(5)
+ expect(stub.stub.calledTwice).toBeTruthy(
+ 'Only two calls are made, cached result is returned for other calls'
+ )
done()
- .then (msg) -> done msg
- it 'should return the indexes of the two named columns', (done) ->
- @import._getHeaderIndexes ['foo', 'q', 'bar', 's'], 's, q'
- .then (indexes) ->
- expect(indexes[0]).toBe 3
- expect(indexes[1]).toBe 1
- done()
- .catch (err) -> done(_.prettify err)
+ it 'should map custom fields with type String', (done) ->
+ rawCSV =
+ '''
+ sku,quantityOnStock,customType,customField.quantityFactor,customField.color
+ 123,77,my-type,12,nac
+ abc,-3,my-type,5,ho
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(_.size stocks).toBe 2
+ s = stocks[0]
+ expect(s.sku).toBe '123'
+ expect(s.quantityOnStock).toBe(77)
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.quantityFactor).toBe(12)
+ expect(s.custom.fields.color).toBe 'nac'
+ s = stocks[1]
+ expect(s.sku).toBe 'abc'
+ expect(s.quantityOnStock).toBe -3
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.quantityFactor).toBe 5
+ expect(s.custom.fields.color).toBe 'ho'
+ done()
+
+ it 'should map custom fields with type LocalizedString', (done) ->
+ rawCSV =
+ '''
+ sku,quantityOnStock,customType,customField.localizedString.en,customField.localizedString.de,customField.name.de
+ 123,77,my-type,english,deutsch,abi
+ abc,-3,my-type,blue,automat,sil
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0])
+ .then((stocks) ->
+ expect(_.size stocks).toBe 2
+ s = stocks[0]
+ expect(s.sku).toBe '123'
+ expect(s.quantityOnStock).toBe(77)
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.localizedString.en).toBe 'english'
+ expect(s.custom.fields.localizedString.de).toBe 'deutsch'
+ expect(s.custom.fields.name.de).toBe 'abi'
+ s = stocks[1]
+ expect(s.sku).toBe 'abc'
+ expect(s.quantityOnStock).toBe -3
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.localizedString.en).toBe 'blue'
+ expect(s.custom.fields.localizedString.de).toBe 'automat'
+ expect(s.custom.fields.name.de).toBe 'sil'
+ done())
+ .catch (err) ->
+ expect(err).not.toBeDefined()
+ done()
+
+ it 'should map custom fields with type Money', (done) ->
+ rawCSV =
+ '''
+ sku,quantityOnStock,customType,customField.price,customField.color
+ 123,77,my-type,EUR 120,nac
+ abc,-3,my-type,EUR 230,ho
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(_.size stocks).toBe 2
+ s = stocks[0]
+ expect(s.sku).toBe '123'
+ expect(s.quantityOnStock).toBe(77)
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.price).toEqual {currencyCode: 'EUR', centAmount: 120}
+ expect(s.custom.fields.color).toBe 'nac'
+ s = stocks[1]
+ expect(s.sku).toBe 'abc'
+ expect(s.quantityOnStock).toBe -3
+ expect(s.custom.type.id).toBeDefined()
+ expect(s.custom.fields.price).toEqual {currencyCode: 'EUR', centAmount: 230}
+ expect(s.custom.fields.color).toBe 'ho'
+ done()
+
+ it 'should report errors on data', (done) ->
+ rawCSV =
+ '''
+ sku,quantityOnStock,customType,customField.price,customField.color
+ 123,77,my-type,EUR 120,nac
+ abc,-3,my-type,EUR,ho
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then((stocks) ->
+ expect(stocks).not.toBeDefined()
+ ).catch (err) ->
+ expect(err.length).toBe 1
+ expect(err.join()).toContain('Can not parse money')
+ done()
describe '::_mapStockFromCSV', ->
-
it 'should map a simple entry', (done) ->
rawCSV =
'''
- id,amount
+ sku,quantityOnStock
123,77
abc,-3
'''
- Csv().from.string(rawCSV).to.array (data, count) =>
- stocks = @import._mapStockFromCSV _.rest(data)
- expect(_.size stocks).toBe 2
- s = stocks[0]
- expect(s.sku).toBe '123'
- expect(s.quantityOnStock).toBe 77
- s = stocks[1]
- expect(s.sku).toBe 'abc'
- expect(s.quantityOnStock).toBe -3
- done()
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(_.size stocks).toBe 2
+ s = stocks[0]
+ expect(s.sku).toBe '123'
+ expect(s.quantityOnStock).toBe 77
+ s = stocks[1]
+ expect(s.sku).toBe 'abc'
+ expect(s.quantityOnStock).toBe -3
+ done()
+
+ it 'should map deprecated header quantity', (done) ->
+ rawCSV =
+ '''
+ sku,quantity
+ 123,77
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(stocks[0].sku).toBe '123'
+ expect(stocks[0].quantityOnStock).toBe 77
+ done()
- it 'shoud not crash when quantity is missing', (done) ->
+ it 'should not crash when quantity is missing', (done) ->
rawCSV =
'''
- foo,id,amount
+ foo,sku,quantityOnStock
+ bar,abc,
+ bar,123,77
+ '''
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(_.size stocks).toBe 2
+ s = stocks[0]
+ expect(s.sku).toBe 'abc'
+ expect(s.quantityOnStock).toBe 0
+ s = stocks[1]
+ expect(s.sku).toBe '123'
+ expect(s.quantityOnStock).toBe 77
+ done()
+
+ it 'should crash when csv columns are inconsistent', (done) ->
+ # Empty columns should be represented with empty delimiter
+ rawCSV =
+ '''
+ foo,sku,quantityOnStock
bar,abc
bar,123,77
'''
- Csv().from.string(rawCSV).to.array (data, count) =>
- stocks = @import._mapStockFromCSV _.rest(data), 1, 2
- expect(_.size stocks).toBe 2
- s = stocks[0]
- expect(s.sku).toBe 'abc'
- expect(s.quantityOnStock).toBe 0
- s = stocks[1]
- expect(s.sku).toBe '123'
- expect(s.quantityOnStock).toBe 77
+ csv.parse rawCSV, (err, data) ->
+ expect(err).toBeDefined()
+ expect(err.message).toBe('Number of columns is inconsistent on line 2')
+ expect(data).not.toBeDefined()
done()
xit 'shoud not crash when quantity is missing', (done) ->
rawCSV =
'''
- foo,id,amount
+ foo,sku,quantityOnStock
bar
'''
- Csv().from.string(rawCSV).to.array (data, count) =>
- stocks = @import._mapStockFromCSV _.rest(data), 1, 2
- expect(_.size stocks).toBe 0
- done()
+ csv.parse rawCSV, (err, data) =>
+ @import._mapStockFromCSV(_.rest(data), data[0]).then (stocks) ->
+ expect(_.size stocks).toBe 0
+ done()
describe '::performCSV', ->
@@ -285,18 +456,18 @@ describe 'StockImport', ->
it 'should parse with a custom delimiter', (done) ->
rawCSV =
'''
- id;amount
+ sku;quantityOnStock
123;77
abc;-3
'''
@import.csvDelimiter = ';'
spyOn(@import, '_perform').andReturn Promise.resolve()
- spyOn(@import, '_getHeaderIndexes').andCallThrough()
+ spyOn(@import, '_mapStockFromCSV').andCallThrough()
@import.performCSV(rawCSV)
- .then (result) =>
- expect(@import._getHeaderIndexes).toHaveBeenCalledWith ['id', 'amount'], 'id, amount'
- done()
- .catch (err) -> done(_.prettify err)
+ .then (result) =>
+ expect(@import._mapStockFromCSV).toHaveBeenCalledWith [ [ '123', '77' ], [ 'abc', '-3' ] ], [ 'sku', 'quantityOnStock' ]
+ done()
+ .catch (err) -> done(_.prettify err)
describe '::performStream', ->