Skip to content

Commit

Permalink
Add support for custom fields (#46)
Browse files Browse the repository at this point in the history
Refactor and update mappings and CSV data handling.

The header row is no longer static; this is a major breaking change, both in the naming of the fields and in the removal of the static header option.
  • Loading branch information
hisabimbola authored and Siilwyn committed Nov 3, 2016
1 parent 1771e2e commit 2fdad47
Show file tree
Hide file tree
Showing 16 changed files with 996 additions and 136 deletions.
5 changes: 0 additions & 5 deletions .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -16,11 +16,6 @@ script:
- ./bin/stock-import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --file data/example.xml
- ./bin/stock-import --projectKey ${SPHERE_PROJECT_KEY} --clientId ${SPHERE_CLIENT_ID} --clientSecret ${SPHERE_CLIENT_SECRET} --file data/example.csv
- docker build -t commercetools/stock-import .
env:
global:
- secure: HYwKMrJ7h5Sz7E1OJf3jv0NOsgvYQ60Kl6qlZUjahoHgYDe//cVLpNJrMPZhAeyTGbpxeeqKxDXorQ6oMn9wqYbQGAXcAWYb0aw0oQH1q9XNteLiW+xVb+PDPRouSCh/cJTjtKdfa/C5L+rb8n6P3TuAI8qooBV9WY8f21IU7aM=
- secure: XKG5Icf8flDb4+A5t4pdAkLoQunlly/P6wVBnwhChGizgHaKC2oXrnLfPiQ1EryWdhYLjDSmU8jx7xxF84NPrHp5N8eJTpyW85tWAzpH6144VEyVpiti4OtMcbtq55ppsma6zb80IHxwcDKcg/mXg8r9F6tnUT7+7QmVah0Z7zQ=
- secure: qRiJqZFmqQNfjZwk5AV8+DkIbVpyo38qqv8ks255J2zqDcTiBb28ME0UWWs9VXBOXrfPEKet07FAnUWCfguGqMG3+S3yFVxsaGlrX+ZQu8pSbXXseVgUOPPlVKAXHCm6fjf0jpElPhrJJc8QbQth+DUzrTfBTzgQMvEwhLG7auc=
notifications:
hipchat:
rooms:
Expand Down
6 changes: 3 additions & 3 deletions Gruntfile.coffee
Original file line number Diff line number Diff line change
Expand Up @@ -19,8 +19,8 @@ module.exports = (grunt) ->
default: ['Gruntfile.coffee', 'src/**/*.coffee']

clean:
default: "lib"
test: "test"
default: "lib/*"
test: "test/*"

coffee:
options:
Expand Down Expand Up @@ -54,7 +54,7 @@ module.exports = (grunt) ->
# watching for changes
watch:
default:
files: ["src/coffee/*.coffee"]
files: ["src/coffee/*.coffee", "src/spec/*.spec.coffee"]
tasks: ["build"]
test:
files: ["src/**/*.coffee"]
Expand Down
22 changes: 13 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -30,16 +30,20 @@ When using SFTP, you should not use the `--file` option, instead you need to pro

### CSV Format

Column 1 will be used as `SKU` identifier, whereas column 2 will be used as `quantity`.
An example:
A simple example:
```
sku,quantity
foo,9
bar,-1
SKU-123,42
sku,quantityOnStock,restockableInDays,supplyChannel,expectedDelivery
foo,9,3,channel-key,2016-10-27T14:36:04.487Z
bar,-1,3,channel-key,2016-10-27T14:36:04.487Z
SKU-123,42,3,other-channel,2016-10-27T14:36:04.487Z
```

> Please note that the header names are currently ignored.
### Custom fields
```
sku,quantityOnStock,customType,customField.foo,customField.bar
123,77,my-type,12,nac
abc,-3,my-type,5,ho
```

### XML Format

Expand All @@ -48,11 +52,11 @@ SKU-123,42
<root>
<row>
<code>foo</code>
<quantity>7</quantity>
<quantityOnStock>7</quantityOnStock>
</row>
<row>
<code>bar</code>
<quantity>1</quantity>
<quantityOnStock>1</quantityOnStock>
</row>
</root>
```
Expand Down
4 changes: 2 additions & 2 deletions data/example.csv
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
sku,quantity
sku,quantityOnStock
foo,9
bar,-1
SKU-123,42
SKU-123,42
2 changes: 1 addition & 1 deletion data/example.xml
Original file line number Diff line number Diff line change
Expand Up @@ -9,4 +9,4 @@
<quantity>1</quantity>
<CommittedDeliveryDate>2013-11-19T00:00:00</CommittedDeliveryDate>
</row>
</root>
</root>
11 changes: 6 additions & 5 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -43,10 +43,10 @@
"dependencies": {
"bluebird": "2.9.33",
"bunyan-logentries": "0.1.0",
"csv": "0.3.7",
"csv": "^1.1.0",
"debug": "2.2.0",
"optimist": "0.6.1",
"sphere-node-sdk": "^1.14.0",
"sphere-node-sdk": "^1.16.0",
"sphere-node-utils": "0.7.0",
"tmp": "0.0.23",
"underscore": "1.8.3",
Expand All @@ -55,17 +55,18 @@
},
"devDependencies": {
"coveralls": "2.11.2",
"grunt-cli": "0.1.13",
"grunt": "0.4.5",
"grunt-bump": "0.0.13",
"grunt-coffeelint": "0.0.8",
"grunt-cli": "0.1.13",
"grunt-coffeelint": "0.0.16",
"grunt-contrib-clean": "0.6.0",
"grunt-contrib-coffee": "0.13.0",
"grunt-contrib-concat": "0.5.1",
"grunt-contrib-watch": "0.6.1",
"grunt-shell": "0.6.4",
"istanbul": "0.3.17",
"jasmine-node": "1.14.5",
"jasmine-node": "^1.14.5",
"sinon": "^1.17.6",
"sphere-coffeelint": "sphereio/sphere-coffeelint#master"
},
"keywords": [
Expand Down
28 changes: 28 additions & 0 deletions src/coffee/constants.coffee
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# Shared constants for the stock import tool: the CSV header names the
# importer recognises, channel defaults, and the validation regexes used
# by the custom-field mappings.
constants =
  # CSV column headers recognised by the importer.
  HEADER_SKU: 'sku'
  HEADER_QUANTITY: 'quantityOnStock'
  # Old quantity header, still accepted for backwards compatibility.
  DEPRECATED_HEADER_QUANTITY: 'quantity'
  HEADER_RESTOCKABLE: 'restockableInDays'
  HEADER_EXPECTED_DELIVERY: 'expectedDelivery'
  HEADER_SUPPLY_CHANNEL: 'supplyChannel'
  HEADER_CUSTOM_TYPE: 'customType'
  # NOTE(review): 'SEPERATOR' misspelling kept intentionally — the name is
  # exported and may be referenced elsewhere in the project.
  HEADER_CUSTOM_SEPERATOR: '.'
  HEADER_CUSTOM_REGEX: new RegExp /^customField\./

  CHANNEL_KEY_FOR_XML_MAPPING: 'expectedStock'
  CHANNEL_REF_NAME: 'supplyChannel'
  CHANNEL_ROLES: ['InventorySupply', 'OrderExport', 'OrderImport']
  LOG_PREFIX: "[SphereStockImport] "
  CHANNEL_REFERENCE_TYPE: 'channel'

  # Validation regexes shared by the custom-field mappings.
  REGEX_PRICE: new RegExp /^(([A-Za-z]{2})-|)([A-Z]{3}) (-?\d+)(-?\|(\d+)|)( ([^#]*)|)(#(.*)|)$/
  REGEX_MONEY: new RegExp /^([A-Z]{3}) (-?\d+)$/
  REGEX_INTEGER: new RegExp /^-?\d+$/
  REGEX_FLOAT: new RegExp /^-?\d+(\.\d+)?$/
  REGEX_LANGUAGE: new RegExp /^([a-z]{2,3}(?:-[A-Z]{2,3}(?:-[a-zA-Z]{4})?)?)$/
  # Fix: the currency alternation is wrapped in a non-capturing group so the
  # ^ and $ anchors apply to every alternative, not only to the first (AED)
  # and last (ZWD). Previously e.g. 'xxAFNxx' matched because the middle
  # alternatives were unanchored.
  REGEX_CUR: new RegExp /^(?:AED|AFN|ALL|AMD|ANG|AOA|ARS|AUD|AWG|AZN|BAM|BBD|BDT|BGN|BHD|BIF|BMD|BND|BOB|BRL|BSD|BTN|BWP|BYR|BZD|CAD|CDF|CHF|CLP|CNY|COP|CRC|CUC|CUP|CVE|CZK|DJF|DKK|DOP|DZD|EGP|ERN|ETB|EUR|FJD|FKP|GBP|GEL|GGP|GHS|GIP|GMD|GNF|GTQ|GYD|HKD|HNL|HRK|HTG|HUF|IDR|ILS|IMP|INR|IQD|IRR|ISK|JEP|JMD|JOD|JPY|KES|KGS|KHR|KMF|KPW|KRW|KWD|KYD|KZT|LAK|LBP|LKR|LRD|LSL|LYD|MAD|MDL|MGA|MKD|MMK|MNT|MOP|MRO|MUR|MVR|MWK|MXN|MYR|MZN|NAD|NGN|NIO|NOK|NPR|NZD|OMR|PAB|PEN|PGK|PHP|PKR|PLN|PYG|QAR|RON|RSD|RUB|RWF|SAR|SBD|SCR|SDG|SEK|SGD|SHP|SLL|SOS|SPL|SRD|STD|SVC|SYP|SZL|THB|TJS|TMT|TND|TOP|TRY|TTD|TVD|TWD|TZS|UAH|UGX|USD|UYU|UZS|VEF|VND|VUV|WST|XAF|XCD|XDR|XOF|XPF|YER|ZAR|ZMW|ZWD)$/


# Re-export every constant as a flat module member.
for name, value of constants
  exports[name] = value
2 changes: 1 addition & 1 deletion src/coffee/elasticio.coffee
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,7 @@ exports.process = (msg, cfg, next, snapshot) ->
project_key: cfg.sphereProjectKey
timeout: 60000
user_agent: "#{package_json.name} - elasticio - #{package_json.version}",
csvHeaders: 'sku, quantity'
csvHeaders: 'sku, quantityOnStock'
csvDelimiter: ','

stockimport = new StockImport logger, opts
Expand Down
100 changes: 100 additions & 0 deletions src/coffee/mappings.coffee
Original file line number Diff line number Diff line change
@@ -0,0 +1,100 @@
_ = require 'underscore'
_.mixin require('underscore-mixins')
csv = require 'csv'
CONS = require './constants'

# Maps raw CSV cell strings onto typed custom-field values. Mapping
# failures never throw; instead a human-readable message with row and
# type context is collected in @errors and `undefined` is returned.
class CustomFieldMappings

  constructor: (options = {}) ->
    # Accumulates "[row N:typeKey] ..." messages; callers inspect this
    # after mapping to decide whether the import row is usable.
    @errors = []

  # Dispatches `value` to the mapper matching the field definition named
  # `key` in `fieldDefinitions`. Returns the mapped value, the raw value
  # for unknown field types, or `undefined` when no definition matches
  # (or the matching mapper failed and reported an error).
  mapFieldTypes: ({fieldDefinitions, typeDefinitionKey, rowIndex, key, value, langHeader}) ->
    result = undefined
    _.each fieldDefinitions, (fieldDefinition) =>
      if fieldDefinition.name is key
        switch fieldDefinition.type.name
          when 'Number' then result = @mapNumber value, typeDefinitionKey, rowIndex
          when 'Boolean' then result = @mapBoolean value, typeDefinitionKey, rowIndex
          when 'Money' then result = @mapMoney value, typeDefinitionKey, rowIndex
          when 'LocalizedString' then result = @mapLocalizedString value, typeDefinitionKey, rowIndex, langHeader
          when 'Set' then result = @mapSet value, typeDefinitionKey, rowIndex, fieldDefinition.type.elementType
          else result = value
    result

  # A cell is mappable only when it is a non-empty string.
  isValidValue: (rawValue) ->
    return _.isString(rawValue) and rawValue.length > 0

  # Parses an integer (or, via `regEx`, another numeric format).
  # Pushes an error and returns `undefined` when the value does not match.
  mapNumber: (rawNumber, typeDefinitionKey, rowIndex, regEx = CONS.REGEX_INTEGER) ->
    return unless @isValidValue(rawNumber)
    matchedNumber = regEx.exec rawNumber
    unless matchedNumber
      @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] The number '#{rawNumber}' isn't valid!"
      return
    parseInt matchedNumber[0], 10
  ###
  custom,customField.name.de,customField.name.en
  my-type,Hajo,Abi
  //- {
    custom: {
      name: {
        de: 'Hajo',
        en: 'Abi'
      }
    }
  }
  ###
  # Wraps `value` in a one-key object keyed by the language tag taken
  # from the column header (see example above). Reports an error and
  # returns `undefined` when the header is not a valid language tag.
  mapLocalizedString: (value, typeDefinitionKey, rowIndex, langHeader, regEx = CONS.REGEX_LANGUAGE) ->
    unless regEx.test langHeader
      @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] localisedString header '#{langHeader}' format is not valid!"
      return
    "#{langHeader}": value

  # Splits a comma-separated cell and maps every element with the mapper
  # for `elementType`, dropping elements that failed to map.
  # NOTE(review): the LocalizedString case passes no langHeader, so such
  # elements always fail validation — looks unsupported; confirm intent.
  mapSet: (values, typeDefinitionKey, rowIndex, elementType) ->
    result = undefined
    values = values.split(',')
    result = _.map values, (value) =>
      switch elementType.name
        when 'Number' then @mapNumber value, typeDefinitionKey, rowIndex
        when 'Boolean' then @mapBoolean value, typeDefinitionKey, rowIndex
        when 'Money' then @mapMoney value, typeDefinitionKey, rowIndex
        when 'LocalizedString' then @mapLocalizedString value, typeDefinitionKey, rowIndex
        else value
    _.reject(result, _.isUndefined)

  # Parses 'true'/'false' (case-insensitive). Empty/undefined input maps
  # to `undefined` silently; anything else non-boolean reports an error.
  mapBoolean: (rawBoolean, typeDefinitionKey, rowIndex) ->
    if _.isUndefined(rawBoolean) or (_.isString(rawBoolean) and _.isEmpty(rawBoolean))
      return
    errorMsg = "[row #{rowIndex}:#{typeDefinitionKey}] The value '#{rawBoolean}' isn't a valid boolean!"
    try
      b = JSON.parse(rawBoolean.toLowerCase())
      unless _.isBoolean b
        # Fix: previously pushed the undefined variable `error`, which
        # threw a ReferenceError that the bare catch below swallowed —
        # the message was only recorded by accident.
        @errors.push errorMsg
        return
      b
    catch
      @errors.push errorMsg
      return

  # Parses a money cell of the form '<CUR> <centAmount>', e.g.:
  # EUR 300
  # USD 999
  # Returns { currencyCode, centAmount } or `undefined` with an error.
  mapMoney: (rawMoney, typeDefinitionKey, rowIndex) ->
    return unless @isValidValue(rawMoney)
    matchedMoney = CONS.REGEX_MONEY.exec rawMoney
    unless matchedMoney
      @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] Can not parse money '#{rawMoney}'!"
      return

    validCurr = CONS.REGEX_CUR.exec matchedMoney[1]
    unless validCurr
      @errors.push "[row #{rowIndex}:#{typeDefinitionKey}] Parsed currency is not valid '#{rawMoney}'!"
      return

    money =
      currencyCode: matchedMoney[1].toUpperCase()
      centAmount: parseInt matchedMoney[2], 10

module.exports = CustomFieldMappings
7 changes: 2 additions & 5 deletions src/coffee/run.coffee
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,6 @@ argv = require('optimist')
.describe('sphereAuthHost', 'SPHERE.IO OAuth host to connect to')
.describe('sphereAuthProtocol', 'SPHERE.IO OAuth protocol to connect to')
.describe('file', 'XML or CSV file containing inventory information to import')
.describe('csvHeaders', 'a list of column names to use as mapping, comma separated')
.describe('csvDelimiter', 'the delimiter type used in the csv')
.describe('sftpCredentials', 'the path to a JSON file where to read the credentials from')
.describe('sftpHost', 'the SFTP host (overwrite value in sftpCredentials JSON, if given)')
Expand All @@ -35,7 +34,6 @@ argv = require('optimist')
.describe('logDir', 'directory to store logs')
.describe('logSilent', 'use console to print messages')
.describe('timeout', 'Set timeout for requests')
.default('csvHeaders', 'sku, quantity')
.default('csvDelimiter', ',')
.default('logLevel', 'info')
.default('logDir', '.')
Expand Down Expand Up @@ -102,7 +100,6 @@ ensureCredentials(argv)
options = _.extend credentials,
timeout: argv.timeout
user_agent: "#{package_json.name} - #{package_json.version}"
csvHeaders: argv.csvHeaders
csvDelimiter: argv.csvDelimiter

options.host = argv.sphereHost if argv.sphereHost
Expand Down Expand Up @@ -148,7 +145,7 @@ ensureCredentials(argv)

# unsafeCleanup: recursively removes the created temporary directory, even when it's not empty
tmp.dirAsync {unsafeCleanup: true}
.then (tmpPath) =>
.then (tmpPath) ->
logger.debug "Tmp folder created at #{tmpPath}"
sftpHelper.download(tmpPath)
.then (files) ->
Expand Down Expand Up @@ -203,4 +200,4 @@ ensureCredentials(argv)
.catch (err) =>
logger.error err, "Problems on getting client credentials from config files."
@exitCode = 1
.done()
.done()
Loading

0 comments on commit 2fdad47

Please sign in to comment.