Async
Questions about async, the utility module for working with asynchronous JavaScript in Node.js and the browser
I need to write code like the following:
function taskFirst(k, v) {
  console.log(k, v);
}

function taskSecond(k, v) {
  console.log(k, v);
}

function run() {
  var g1 = "Something";
  var g2 = "Something";
  var g3 = "Something";
  var g4 = "Something";

  async.series(
    [
      taskFirst(g1, g2),
      taskSecond(g3, g4)
    ],
    function(error, result) {
    }
  );
}
What is the right way to pass custom variables and async.js callback function?
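For reference, a minimal sketch of one common way to do this (the extra callback parameter on the tasks is my addition, not from the original code): async.series expects an array of functions, so the direct calls taskFirst(g1, g2) execute immediately and put undefined into the array; wrapping each call keeps the custom arguments while still handing async's own callback to each task.

function taskFirst(k, v, callback) {
  console.log(k, v);
  callback(null);
}

function taskSecond(k, v, callback) {
  console.log(k, v);
  callback(null);
}

function run() {
  var g1 = "Something";
  var g2 = "Something";
  var g3 = "Something";
  var g4 = "Something";

  async.series([
    function (callback) { taskFirst(g1, g2, callback); },   // wrapper passes custom args plus async's callback
    function (callback) { taskSecond(g3, g4, callback); }
  ], function (error, result) {
    // runs once both tasks have finished, or as soon as one passes an error
  });
}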
Source: (StackOverflow)
I keep getting an ETIMEDOUT or ECONNRESET error followed by a Callback was already called error when I run index.js.
At first I thought it was because I was not including return prior to calling the onEachLimitItem callback, so I included it per the async multiple-callbacks documentation, but that still didn't solve it. I've also tried removing the error event, and removing the callback to onEachLimit in the error event, but neither has worked. I've looked at the other SO questions around the Callback was already called issue, but because they aren't concerned with streams, I didn't find a solution.
My understanding is that if the stream encounters an error like ECONNRESET, it will return the callback in the error event and move on to the next stream, but this doesn't seem to be the case. It almost seems as if the error resolves itself, i.e. it re-connects, tries sending the errored stream up to Azure again, and it works; then the 'finish' event fires and we get the Callback was already called error.
Am I handling the callbacks within the stream events correctly?
var Q = require('q');
var async = require('async');
var webshot = require('webshot');
var Readable = require('stream').Readable;
var azure = require('azure-storage');

var blob = azure.createBlobService('123', '112244');
var container = 'awesome';

var countries = [
  'en-us', 'es-us', 'en-au', 'de-at', 'pt-br', 'en-ca', 'fr-ca', 'cs-cz', 'ar-ly', 'es-ve',
  'da-dk', 'fi-fi', 'de-de', 'hu-hu', 'ko-kr', 'es-xl', 'en-my', 'nl-nl', 'en-nz', 'nb-no',
  'nn-no', 'pl-pl', 'ro-ro', 'ru-ru', 'ca-es', 'es-es', 'eu-es', 'gl-es', 'en-gb', 'es-ar',
  'nl-be', 'bg-bg', 'es-cl', 'zh-cn', 'es-co', 'es-cr', 'es-ec', 'et-ee', 'fr-fr', 'el-gr',
  'zh-hk', 'en-in', 'id-id', 'en-ie', 'he-il', 'it-it', 'ja-jp', 'es-mx', 'es-pe', 'en-ph'
];

var uploadStreamToStorage = function (fileName, stream, onEachLimitItem) {
  var readable = new Readable().wrap(stream);
  var writeable = blob.createWriteStreamToBlockBlob(container, fileName);

  readable.pipe(writeable);

  writeable.on('error', function (error) {
    return onEachLimitItem.call(error);
  });

  writeable.on('finish', function () {
    onEachLimitItem.call(null);
  });
};

var takeIndividualScreenshot = function (ID, country, onEachLimitItem) {
  var fileName = ID + '-' + country + '.jpg';
  var url = 'https://example.com/' + country + '/' + ID;

  webshot(url, function (error, stream) {
    if (error) { throw 'Screenshot not taken'; }
    uploadStreamToStorage(fileName, stream, onEachLimitItem);
  });
};

var getAllCountriesOfId = function (ID) {
  var deferred = Q.defer();
  var limit = 5;

  function onEachCountry(country, onEachLimitItem) {
    takeIndividualScreenshot(ID, country, onEachLimitItem);
  }

  async.eachLimit(countries, limit, onEachCountry, function (error) {
    if (error) { deferred.reject(error); }
    deferred.resolve();
  });

  return deferred.promise;
};

var createContainer = function () {
  var df = Q.defer();
  var self = this;

  blob.createContainerIfNotExists(this.container, this.containerOptions, function (error) {
    if (error) { df.reject(error); }
    df.resolve(self.container);
  });

  return df.promise;
};

createContainer()
  .then(function () {
    return getAllCountriesOfId('211007');
  })
  .then(function () {
    return getAllCountriesOfId('123456');
  })
  .fail(function (error) {
    log.info(error);
  });
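For reference, here is a minimal sketch (an assumption on my part, not verified against azure-storage) of how the per-item callback could be guarded so that an 'error' event followed by a later 'finish' event can never invoke it twice, with the error passed as an argument rather than as the this value via .call(); it reuses Readable, blob and container from the code above.

var uploadStreamToStorage = function (fileName, stream, onEachLimitItem) {
  var readable = new Readable().wrap(stream);
  var writeable = blob.createWriteStreamToBlockBlob(container, fileName);
  var finished = false;

  function done(error) {
    if (finished) { return; }        // ignore any later 'error'/'finish' events
    finished = true;
    onEachLimitItem(error || null);  // pass the error as an argument, not as `this`
  }

  readable.pipe(writeable);
  writeable.on('error', done);
  writeable.on('finish', function () { done(null); });
};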
Source: (StackOverflow)
I want to know how parallel execution works in async.js:
async = require('async')

async.parallel([
  function(callback){
    for (var i = 0; i < 1000000000; i++) /* Do nothing */;
    console.log("function: 1")
  },
  function(callback){
    console.log("function: 2")
  }
]);
In the above example, I expect to obtain this output:
function: 2
function: 1
but the console prints the opposite order. What is happening? Thanks.
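For illustration, a small sketch (the setTimeout delay is an arbitrary stand-in for real asynchronous work): async.parallel starts the tasks in array order, so a purely synchronous busy loop in the first task runs to completion before the second task ever starts; if the first task yields to the event loop instead, the expected order appears.

var async = require('async');

async.parallel([
  function (callback) {
    setTimeout(function () {       // yields to the event loop, unlike the busy loop above
      console.log("function: 1");
      callback(null);
    }, 100);
  },
  function (callback) {
    console.log("function: 2");
    callback(null);
  }
]);
// prints "function: 2" immediately, then "function: 1" about 100 ms later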
Source: (StackOverflow)
Using mongoskin, I can do a query like this, which will return a cursor:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
  });
});
However, I'd like to call some async functions for each document, and only move on to the next item on the cursor after this has called back (similar to the eachSeries structure in the async.js module). E.g.:
myCollection.find({}, function(err, resultCursor) {
  resultCursor.each(function(err, result) {
    externalAsyncFunction(result, function(err) {
      //externalAsyncFunction completed - now want to move to next doc
    });
  });
});
How could I do this?
Thanks
UPDATE:
I don't want to use toArray() as this is a large batch operation, and the results might not fit in memory in one go.
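For what it's worth, a sketch of one way to do this without async.js, assuming the underlying driver exposes cursor.nextObject(callback), which yields null once the cursor is exhausted (an assumption about the driver version); each document is fully processed before the next one is read, and nothing is buffered in memory.

myCollection.find({}, function (err, resultCursor) {
  if (err) { return console.error(err); }

  (function processNext() {
    resultCursor.nextObject(function (err, doc) {
      if (err) { return console.error(err); }
      if (doc === null) { return console.log('done'); }   // end of cursor

      externalAsyncFunction(doc, function (err) {
        if (err) { return console.error(err); }
        processNext();                                     // move on only after the callback
      });
    });
  })();
});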
Source: (StackOverflow)
I have a large node.js application that heavily uses the async.js module.
I have a lot of code like this:
async.series([
  function(callback){
    sql.update(query, callback);
  },
  function(callback){
    if (something){
      sql.update(query2, callback);
    }
    else {
      callback(null);
    }
  }
]);
The big problem is the synchronous callback in the else branch. I read a while back that you should not do that with async.js as it could cause unexpected results, but I'm not sure what the best alternative is. I read that I should use process.nextTick in some places, but now I'm reading that we should not use that and should use setImmediate instead.
Can someone point me in the right direction? Thanks!
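A minimal sketch of one option (assuming Node 0.10+, where setImmediate exists): defer the synchronous branch so the task callback is never invoked in the same tick.

async.series([
  function (callback) {
    sql.update(query, callback);
  },
  function (callback) {
    if (something) {
      sql.update(query2, callback);
    } else {
      // defer the "nothing to do" completion so it is always asynchronous
      setImmediate(function () { callback(null); });
    }
  }
]);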
Source: (StackOverflow)
I'm trying to traverse a tree of nested items using async.js. The traversal terminates after going through just one branch.
var count=0;

exports.buildFamily = function(item_id, mback){
  var extendedFamily={};
  exports.getItembyId(item_id, function(err, item){
    extendedFamily=item;
    if(item.descendants){
      extendedFamily.kids=[];
      count=+item.descendants.length;
      console.log('outercount ' + count);
      async.eachSeries(item.descendants, function(item){
        count--
        console.log('item: ' + item)
        exports.buildFamily(item, function(err, family){
          console.log('deepcount: ' + count);
          extendedFamily.kids.push(family);
          if(count===0){ return mback(null, extendedFamily); }
          else { extendedFamily.kids.push(family); }
        })
      })
    }
    else {
      if(count===0){ return mback(null, extendedFamily); }
      else {
        extendedFamily.kids.push(family);
        return;
      }
    }
  });
};
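For comparison, a sketch of the same traversal that lets async.eachSeries track completion instead of the shared count variable (getItembyId and the item shape are taken from the code above; the rest is an assumption, not a tested fix).

var async = require('async');

exports.buildFamily = function(item_id, mback){
  exports.getItembyId(item_id, function(err, item){
    if (err) { return mback(err); }

    var extendedFamily = item;
    if (!item.descendants || item.descendants.length === 0) {
      return mback(null, extendedFamily);        // leaf: nothing to recurse into
    }

    extendedFamily.kids = [];
    async.eachSeries(item.descendants, function(childId, done){
      exports.buildFamily(childId, function(err, family){
        if (err) { return done(err); }
        extendedFamily.kids.push(family);
        done();                                  // move to the next descendant
      });
    }, function(err){
      mback(err, extendedFamily);                // every branch has finished
    });
  });
};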
Source: (StackOverflow)
As I have understood it so far: JavaScript is single-threaded. If you defer the execution of some procedure, you just schedule it (queue it) to be run the next time the thread is free. But Async.js defines two methods, Async::parallel and Async::parallelLimit, and I quote:
- parallel(tasks, [callback])
Run an array of functions in parallel, without waiting until the previous function has completed. If any of the functions pass an error to its callback...
- parallelLimit(tasks, limit, [callback])
The same as parallel only the tasks are executed in parallel with a maximum of "limit" tasks executing at any time.
As far as my understanding of English goes, "doing tasks in parallel" means doing them at the same time, simultaneously.
How may Async.js execute tasks in parallel in a single thread?
Am I missing something?
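A minimal sketch (the delays are illustrative): "parallel" here means the asynchronous waits overlap, not that JavaScript code runs on multiple threads, so the whole thing finishes in roughly 100 ms rather than 200 ms.

var async = require('async');

async.parallel([
  function (callback) { setTimeout(function () { callback(null, 'a'); }, 100); },
  function (callback) { setTimeout(function () { callback(null, 'b'); }, 100); }
], function (err, results) {
  // both timers were pending at the same time; total time ~100 ms, not ~200 ms
  console.log(results); // [ 'a', 'b' ]
});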
Source: (StackOverflow)
In an attempt to grasp Q.js, I'd like to convert the following code, which uses async.series, to Q.js. Basically I create a folder if it doesn't exist (using mkdirp), move a file into a backup folder, and save a file into a main folder.
var async = require('async');
var fs = require('fs');
var path = require('path');

var sessiondId = new Date().getTime() % 2 == 0 ? new Date().getTime().toString() : '_1234';
var backupFolder = path.join(__dirname, sessiondId);
var backupFullPath = path.join(backupFolder, 'a.txt');
var fullPath = path.join(__dirname, 'main', 'a.txt');
var mkdirp = require('mkdirp');

async.series({
  createOrSkip: function(callback) {
    mkdirp(backupFolder, function (err, dir) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {created: !!dir, folderAt: backupFolder});
      }
    });
  },
  move: function(callback) {
    fs.rename(fullPath, backupFullPath, function(err) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {backupAt: backupFullPath});
      }
    });
  },
  write: function(callback) {
    fs.writeFile(fullPath, 'abc', function(err) {
      if (err) {
        callback(err, null);
      } else {
        callback(null, {saveAt: fullPath});
      }
    });
  }
}, function(err, result) {
  console.log(result);
});
Actually I don't know where to start. Thanks for your help.
R.
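For reference, a rough sketch of the same sequence in Q, using Q.nfcall to adapt the Node-style callbacks and reusing the variables from the snippet above (the per-step result objects from the async.series version are not reproduced here).

var Q = require('q');

Q.nfcall(mkdirp, backupFolder)
  .then(function (dir) {
    // dir is truthy if the folder was created, falsy if it already existed
    return Q.nfcall(fs.rename, fullPath, backupFullPath);
  })
  .then(function () {
    return Q.nfcall(fs.writeFile, fullPath, 'abc');
  })
  .then(function () {
    console.log({ backupAt: backupFullPath, saveAt: fullPath });
  })
  .fail(function (err) {
    console.error(err);
  });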
Source: (StackOverflow)
I am using the node.js async package, specifically forEachSeries, to make a series of http requests based on parameters drawn from an array. In the callback of each request I have some if/else statements to respond to different types of responses.
// This is the callback of a GET request inside of a forEachSeries
function(error, response) {
  if (response.results) {
    // Do something with results
  }
  else if (!response.results) {
    // Would like to use a continue statement here, but
    // this is not inside of a loop
  }
  else {
    // Do something else
  }
}
Is there an equivalent to 'continue' that I can use inside of the else if above? This is not technically inside of a loop so continue does not work.
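A minimal sketch of the usual pattern (items, makeRequest, and next are placeholder names, not from the original code): with forEachSeries, calling the per-item callback with no arguments is the equivalent of continue, and calling it with an error aborts the whole series.

async.forEachSeries(items, function (item, next) {
  makeRequest(item, function (error, response) {
    if (error) { return next(error); }   // abort the series
    if (response.results) {
      // Do something with results
      return next();                     // move on to the next item
    }
    // No results: skip this item, i.e. 'continue'
    return next();
  });
}, function (err) {
  if (err) { console.error(err); }
});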
Source: (StackOverflow)
I am following the async module's each method (https://github.com/caolan/async#each). It says the method iterates over the array in parallel, and "in parallel" is the phrase that confuses me. AFAIK, there is no way JavaScript can execute code in parallel because it has a single-threaded model.
The examples shown for the each method focus on I/O scenarios. I am using the "each" method just to add the numbers of the array. If parallelism exists, can I prove it using my example?
Thanks for reading.
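A small sketch of the number-adding case (the names are illustrative): because the iterator body is synchronous, each call runs to completion on the single JS thread before the next one starts, so no parallelism is observable for pure CPU work like addition.

var async = require('async');

var numbers = [1, 2, 3, 4, 5];
var sum = 0;

async.each(numbers, function (n, done) {
  sum += n;   // synchronous work: nothing here can overlap with another item
  done();
}, function (err) {
  console.log(sum); // 15, computed strictly one item at a time
});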
Source: (StackOverflow)
I'm trying to use async.whilst to regenerate a random number between 0 and the length of an array until the length of the element at that index is larger than a specified length. I wanted to use async.whilst for this, but the syntax is not completely clear to me. I thought about doing the following:
var selectParagraph = function(paragraphs, callback){
  var index = Math.floor(Math.random() * paragraphs.length);
  async.whilst(
    function(){
      return paragraphs[index].length < minParagraphLength;
    },
    function(cb) {
      index = Math.floor(Math.random() * paragraphs.length);
    },
    function(err) {
      console.log(paragraphs[index]);
      callback(err, paragraphs[index]);
    }
  );
}
However, this doesn't work. I suppose it is because I didn't use the cb for the second function anywhere, but I don't exactly know how I should use it. Do I just call cb() after changing the index? What exactly does the variable err contain?
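A minimal sketch of what I believe the intended shape is, keeping the original structure (minParagraphLength is assumed to be in scope): the iteratee must call cb() once its work for the iteration is done, otherwise whilst never re-checks the test, and err in the final function is whatever the iteratee passed to cb(err).

var selectParagraph = function (paragraphs, callback) {
  var index = Math.floor(Math.random() * paragraphs.length);
  async.whilst(
    function () {
      return paragraphs[index].length < minParagraphLength;   // keep looping while too short
    },
    function (cb) {
      index = Math.floor(Math.random() * paragraphs.length);
      cb();                                                    // signal this iteration is done
    },
    function (err) {
      callback(err, paragraphs[index]);                        // err is whatever was passed to cb(err)
    }
  );
};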
Source: (StackOverflow)
Just trying to get my head around using the async module for Node.js.
I have the following code.
var a1 = [1,2,3,4,5,6,7,8];
async.forEachSeries(a1, function(n1, callback) {
  console.log(n1);
  var a2 = [10,11,12,13,14];
  async.forEachSeries(a2, function(n2, callback) {
    console.log(n1 + " " + n2);
    callback();
  });
  callback();
});
I want the above code to produce the following output:
1
1 10
1 11
1 12
1 13
1 14
2
2 10
2 11
2 12
2 13
2 14
3
3 10
3 11
3 12
3 13
3 14
.....
But instead I'm getting something like:
1
1 10
2
2 10
1 11
3
3 10
2 11
1 12
....
How do I fix this?
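A minimal sketch of the usual fix (as I understand it): only signal the outer series that an item is done once the inner series has completely finished, by using the inner forEachSeries' final callback.

var async = require('async');

var a1 = [1, 2, 3, 4, 5, 6, 7, 8];
async.forEachSeries(a1, function (n1, outerDone) {
  console.log(n1);
  var a2 = [10, 11, 12, 13, 14];
  async.forEachSeries(a2, function (n2, innerDone) {
    console.log(n1 + " " + n2);
    innerDone();
  }, function (err) {
    outerDone(err);   // advance the outer loop only after the inner one finishes
  });
});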
Source: (StackOverflow)
I'm using the async module (see https://github.com/caolan/async) for Node.js, and my question is: why is waterfall so slow?
It takes about 4 seconds to execute this piece of code...
App.post("/form", function(request, response) {
Async.waterfall([
function(callback) {
console.log("1.");
callback(null, "some data");
},
function(data, callback) {
console.log("2.");
callback(null, "some data");
},
function(data, callback) {
console.log("3.");
callback(null, "some data");
}
], function(error, document) {
console.log("4.");
console.log("Done.");
response.send(); // Takes 4 seconds
});
}
Output
1.
2.
// After 4 seconds
3.
4.
Done.
Thanks for any reply!
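For comparison, a standalone sketch (outside Express) that times the waterfall itself; on its own the chain completes almost instantly, which would suggest the delay comes from somewhere else in the request handling rather than from async.waterfall.

var Async = require('async');

var start = Date.now();
Async.waterfall([
  function (callback) { callback(null, "some data"); },
  function (data, callback) { callback(null, "some data"); },
  function (data, callback) { callback(null, "some data"); }
], function (error, document) {
  // typically prints a value well under 1 ms
  console.log("waterfall took", Date.now() - start, "ms");
});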
Source: (StackOverflow)
I'm trying to use the async and request modules together, but I don't understand how the callbacks get passed. My code is:
var fetch = function(file, cb) {
  return request(file, cb);
};

async.map(['file1', 'file2', 'file3'], fetch, function(err, resp, body) {
  // is this function passed as an argument to _fetch_
  // or is it executed as a callback at the end of all the requests?
  // if so how do i pass a callback to the _fetch_ function
  if(!err) console.log(body);
});
I'm trying to fetch 3 files in order and concatenate the results. My head is spinning from all the callbacks and the different combinations I tried. Google wasn't much help.
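A rough sketch of one way to wire this up, assuming the goal is to end up with the three bodies in order: request's callback receives (error, response, body), so the body is forwarded explicitly as each item's result, and the final function runs once, after every request has finished, with the results in input order.

var async = require('async');
var request = require('request');

var fetch = function (file, done) {
  // async.map supplies 'done' itself, one per item
  request(file, function (err, response, body) {
    done(err, body);               // keep just the body as this item's result
  });
};

async.map(['file1', 'file2', 'file3'], fetch, function (err, bodies) {
  if (err) { return console.error(err); }
  console.log(bodies.join(''));    // bodies is an array in the same order as the input
});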
Source: (StackOverflow)
I'm starting to learn node.js, and to aggregate multiple RSS feeds into a single one, I'm fetching the feeds and then recreating a single feed from the data I fetched.
So, in order to handle multiple HTTP requests asynchronously I use https://github.com/caolan/async#forEach, which does the job.
But I can't figure out how to return a value (the RSS XML feed in my case).
Here is my code:
function aggregate(topic) {
  async.forEach(topic.feeds,
    function(item, callback) {
      parseAndProcessFeed(item, callback);
    },
    function(err) {
      // sort items by date
      items.sort(function(a, b) {
        return (Date.parse(b.date) - Date.parse(a.name));
      });
      var rssFeed = createAggregatedFeed();
      console.log(rssFeed);
      return rssFeed;
    }
  );
}
With console.log(rssFeed) I can see the RSS feed, so I think I'm missing something obvious.
How can I return the rssFeed value?
Some help to get me back on track would be great, thanks!
Xavier
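For reference, a minimal sketch of the callback-based shape this usually takes (keeping the names from the snippet above): the finished feed can't be returned from aggregate() directly, because the forEach completion callback runs later; instead aggregate accepts a callback and hands the feed to it.

function aggregate(topic, done) {
  async.forEach(topic.feeds,
    function (item, callback) {
      parseAndProcessFeed(item, callback);
    },
    function (err) {
      if (err) { return done(err); }
      // sort items by date, then hand the aggregated feed to the caller
      items.sort(function (a, b) {
        return Date.parse(b.date) - Date.parse(a.date);
      });
      done(null, createAggregatedFeed());
    }
  );
}

// Usage: aggregate(topic, function (err, rssFeed) { /* use rssFeed here */ });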
Source: (StackOverflow)