Goal: Recursively convert every value of type 'string' or 'number' in a document, generically, via an applied function.

- The function deepCloneAndModify recursively deep-copies a given JavaScript value while applying a supplied conversion function to every value of the requested type.
- The sample applies convertStrToEpoch and reproduces the behavior of the sample dateToEpochConversion.
- Requires Eventing Storage (or a metadata collection) and a "source" collection.
- Operates on all documents where doc.type === "my_data_type".

deepCloneAndModify
// To run, configure the settings for this Function, deepCloneAndModify, as follows:
//
// Version 7.1+
//   "Function Scope"
//     *.* (or try bulk.data if non-privileged)
// Version 7.0+
//   "Listen to Location"
//     bulk.data.source
//   "Eventing Storage"
//     rr100.eventing.metadata
//   Binding(s)
//    1. "binding type", "alias name...", "bucket.scope.collection", "Access"
//       "bucket alias", "src_col",       "bulk.data.source",        "read and write"
//
// Version 6.X
//   "Source Bucket"
//     source
//   "MetaData Bucket"
//     metadata
//   Binding(s)
//    1. "binding type", "alias name...", "bucket", "Access"
//       "bucket alias", "src_col",       "source", "read and write"
function convertStrToEpoch(key, val, control) {
    // Convert string dates to an epoch timestamp: first use a regex to filter,
    // then, if the pattern matches, use Date.parse(..) to convert the string to
    // a numeric timestamp. If the matching rule has "rename": true, a suffix of
    // "_ms" or "_sec" is added to the field name.
    // Example inputs and the formats they represent (Date.parse takes a single string):
    //   "2009/06/29 13:30:10"          -> yyyy/MM/dd HH:mm:ss
    //   "2020-06-24 09.55.22 -07:00"   -> yyyy-MM-dd HH:mm:ss Z
    // Driven by a self-explanatory control document, for example:
    //   var control = [
    //       { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g, "millis": true,  "rename": true },
    //       { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,   "millis": false, "rename": true }
    //   ];
    for (var i = 0; i < control.length; i++) {
        var regxres = val.match(control[i].regx);
        if (regxres !== null) {
            var dt = Date.parse(val);
            if (!isNaN(dt)) { // Date.parse returns NaN on failure
                var suffix = "";
                var epochMs = dt; // Date.parse already returns epoch milliseconds
                if (key !== "" && control[i].rename) {
                    if (control[i].millis) {
                        suffix = "_ms";
                    } else {
                        suffix = "_sec";
                    }
                }
                if (control[i].millis) {
                    val = epochMs;
                } else {
                    val = Math.round(epochMs / 1000);
                }
                return { "key": key + suffix, "val": val, "converted": true };
            }
        }
    }
    return { "key": key, "val": val, "converted": false };
}
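// Illustrative only (not part of the handler): given the two-entry control array
// shown in the comment above, a call such as
//   convertStrToEpoch("when", "2009/06/29 13:30:10", control)
// would be expected to return something like
//   { "key": "when_sec", "val": <epoch seconds>, "converted": true }
// while a value matching no pattern is passed through unchanged with "converted": false.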
function deepCloneAndModify(obj, control, fn, fntype) {
    // Performs a deep copy (deep clone) of obj, applying the conversion function fn
    // to every value whose type matches fntype (in this sample 'string', via convertStrToEpoch).
    // process terminal values of the target type
    if (obj && typeof obj === fntype) {
        var retval = fn("", obj, control);
        return retval.val;
    }
    if (!obj || typeof obj !== 'object') {
        return obj;
    }
    // process Object or Array
    let newObj = {};
    if (Array.isArray(obj)) {
        newObj = obj.map(item => deepCloneAndModify(item, control, fn, fntype));
    } else {
        Object.keys(obj).forEach((k) => {
            var retval = { "key": k, "val": obj[k] };
            if (obj[k] && typeof obj[k] === fntype) {
                retval = fn(k, obj[k], control);
            }
            newObj[retval.key] = deepCloneAndModify(retval.val, control, fn, fntype);
        });
    }
    return newObj;
}
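// Illustrative only (not part of the handler): deepCloneAndModify does not mutate its
// input; it builds and returns a new structure. With fntype 'string' and the two-entry
// control array above, a call like
//   deepCloneAndModify({ "a": { "b": "2009/06/29 13:30:10" }, "n": 7 }, control, convertStrToEpoch, 'string')
// would be expected to yield { "a": { "b_sec": <epoch seconds> }, "n": 7 }; non-string
// values such as 7 are simply copied as-is.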
function OnUpdate(doc, meta) {
    if (doc.type !== "my_data_type") return;
    var rename_with_ms_or_sec = true; // rename our fields
    // Apply four (4) different datetime search patterns across the entire document
    var control = [
        { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g,            "millis": true,  "rename": true },
        { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,              "millis": false, "rename": true },
        { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+ .\d\d:\d\d$/g, "millis": true,  "rename": true },
        { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d .\d\d:\d\d$/g,   "millis": false, "rename": true }
    ];
    var newdoc = deepCloneAndModify(doc, control, convertStrToEpoch, 'string');
    // src_col is the bucket alias bound to our source collection; since 6.5+ the
    // source binding is allowed to be in read/write mode, so we can write back to it.
    src_col[meta.id] = newdoc;
}
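To experiment with the conversion outside of Eventing, the two helper functions above can be pasted into Node.js or a browser console together with a small driver. The driver below is a sketch: the control array, test document, and console.log call are illustrative and are not part of the Eventing Function.

// Standalone sketch (assumes convertStrToEpoch and deepCloneAndModify are pasted in first)
var control = [
    { "regx": /^\d\d\d\d-\d\d-\d\d \d\d:\d\d:\d\d.\d+$/g, "millis": true,  "rename": true },
    { "regx": /^\d\d\d\d\/\d\d\/\d\d \d\d:\d\d:\d\d$/g,   "millis": false, "rename": true }
];
var testdoc = {
    "type": "my_data_type",
    "created": "2009/06/29 13:30:10",
    "nested": { "stamp": "2009-06-29 13:30:10.333" },
    "count": 3
};
var converted = deepCloneAndModify(testdoc, control, convertStrToEpoch, 'string');
console.log(JSON.stringify(converted, null, 2));
// Expected shape: "created_sec" holds epoch seconds, the nested "stamp_ms" holds epoch
// milliseconds, and non-matching values ("my_data_type", 3) are copied unchanged.
// Exact numbers depend on the local timezone because these strings carry no offset.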
Input Data/Mutation

INPUT: KEY my_data_type::1001

{
  "type": "my_data_type",
  "id": 1001,
  "datet1a": "2009/06/29 13:30:10",
  "datet1b": "2009/06/29 13:30:10",
  "datet1c": "2009/06/29 13:30:10 -07:00",
  "datet1d": "2009/06/29 13:30:10 -06:00",
  "datet2a": "2009-06-29 13:30:10.333",
  "datet2b": "2009-06-29 13:30:10.333",
  "datet2c": "2009-06-29 13:30:10.333 -07:00",
  "datet2d": "2009-06-29 13:30:10.333 -06:00",
  "subdoc_same_dates": {
    "datet1a": "2009/06/29 13:30:10",
    "datet1b": "2009/06/29 13:30:10",
    "datet1c": "2009/06/29 13:30:10 -07:00",
    "datet1d": "2009/06/29 13:30:10 -06:00",
    "datet2a": "2009-06-29 13:30:10.333",
    "datet2b": "2009-06-29 13:30:10.333",
    "datet2c": "2009-06-29 13:30:10.333 -07:00",
    "datet2d": "2009-06-29 13:30:10.333 -06:00",
    "subsubdoc_two_dates": {
      "datet1a": "2009/06/29 13:30:10",
      "datet1b": "2009/06/29 13:30:10",
      "dary": [
        "2009/06/29 13:30:10",
        "2009-06-29 13:30:10.333",
        { "datet1a": "2009/06/29 13:30:10" }
      ]
    }
  }
}
Output Data/Mutation

UPDATED/OUTPUT: KEY my_data_type::1001

{
  "type": "my_data_type",
  "id": 1001,
  "datet1a_sec": 1246307410,
  "datet1b_sec": 1246307410,
  "datet1c_sec": 1246307410,
  "datet1d_sec": 1246303810,
  "datet2a_ms": 1246307410333,
  "datet2b_ms": 1246307410333,
  "datet2c_ms": 1246307410333,
  "datet2d_ms": 1246303810333,
  "subdoc_same_dates": {
    "datet1a_sec": 1246307410,
    "datet1b_sec": 1246307410,
    "datet1c_sec": 1246307410,
    "datet1d_sec": 1246303810,
    "datet2a_ms": 1246307410333,
    "datet2b_ms": 1246307410333,
    "datet2c_ms": 1246307410333,
    "datet2d_ms": 1246303810333,
    "subsubdoc_two_dates": {
      "datet1a_sec": 1246307410,
      "datet1b_sec": 1246307410,
      "dary": [
        1246307410,
        1246307410333,
        {
          "datet1a_sec": 1246307410
        }
      ]
    }
  }
}
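As a quick sanity check of the sample output, an epoch value can be converted back to a readable UTC timestamp in any JavaScript console; the numbers below are taken from the output document above.

// Verify a few of the output values (runs in Node.js or a browser console)
console.log(new Date(1246307410 * 1000).toISOString()); // "2009-06-29T20:30:10.000Z", i.e. 13:30:10 at -07:00
console.log(new Date(1246303810 * 1000).toISOString()); // "2009-06-29T19:30:10.000Z", i.e. 13:30:10 at -06:00
console.log(new Date(1246307410333).toISOString());     // "2009-06-29T20:30:10.333Z"
// Input strings without an explicit offset are parsed in the handler's local timezone,
// so their exact epoch values will differ if you run the sample in another timezone.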