Bugfix in event scheduling. Fix deprecated tfjs call.

Alexandre Storelli 2019-03-21 20:42:26 +01:00
parent 103966589e
commit 2936a8909a
4 changed files with 12 additions and 7 deletions

View File

@@ -38,14 +38,14 @@ For best performance (~2x speedup) you should choose to do part of the computati
- Keras (tested with v2.0.8). Keras installation instructions are available [here](https://keras.io/#installation).
- Tensorflow (tested with `tensorflow` v1.4.0 and `tensorflow-gpu` v1.3.0). Installation instructions are [here](https://www.tensorflow.org/install/).
The following should be enough:
```bash
pip install keras tensorflow
```
should be enough. If you do not have pip [follow these instructions to install it](https://pip.pypa.io/en/stable/installing/).
If you do not have pip [follow these instructions to install it](https://pip.pypa.io/en/stable/installing/).
Then install this module:
```bash
git clone https://github.com/adblockradio/adblockradio.git
cd adblockradio

View File

@@ -61,6 +61,7 @@ class PostProcessor extends Transform {
}
this.cache[0].audio = this.cache[0].audio ? Buffer.concat([this.cache[0].audio, obj.data]) : obj.data;
this.cache[0].metadataPath = obj.metadataPath;
if (obj.predInterval) this.cache[0].predInterval = obj.predInterval;
break;
case "fileChunk": // only in file analysis mode
@@ -123,7 +124,7 @@ class PostProcessor extends Transform {
if (this.config.verbose) log.debug("---------------------");
const now = +new Date();
this.slotCounter++;
this.cache.unshift({ ts: null, audio: null, ml: null, hotlist: null, tBuf: tBuffer, n: this.slotCounter, pushed: false });
this.cache.unshift({ ts: null, audio: null, ml: null, hotlist: null, tBuf: tBuffer, n: this.slotCounter, predInterval: 0, pushed: false });
if (this.cache[1]) {
this.cache[1].ts = now;
@@ -143,9 +144,12 @@ class PostProcessor extends Transform {
// schedule the postprocessing for this slot, according to the buffer available.
// "now" is used as a reference for _postProcessing, so it knows which slot to process
// postProcessing happens 500ms before audio playback, so that clients / players have time to act.
// Note: a given cache item is broadcast when the next one starts, so the delay between
// two cache slots (predInterval) is subtracted from the available buffer time (tBuffer).
// TODO: replace tBuffer with tBuffer - predInterval here.
const ppTimeout = setTimeout(this._postProcessing, tBuffer * 1000 - consts.DOWNSTREAM_LATENCY, now);
const predInterval = this.cache[1] ? this.cache[1].predInterval : 0;
if (this.cache[1] && predInterval === 0) log.warn('zero predInterval!');
const ppTimeout = setTimeout(this._postProcessing, (tBuffer - predInterval) * 1000 - consts.DOWNSTREAM_LATENCY, now);
this.cache.find(c => c.ts === now).ppTimeout = ppTimeout;
}
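To make the new timing explicit, here is a minimal standalone sketch of the scheduling rule from the hunk above. It only illustrates the delay arithmetic, not the full PostProcessor; the 500 ms downstream latency is taken from the comment in the diff, and the helper name `postProcessingDelayMs` is hypothetical.

```js
// Sketch of the post-processing scheduling delay.
// The real value comes from consts.DOWNSTREAM_LATENCY; 500 ms is assumed here
// per the "500ms before audio playback" comment above.
const DOWNSTREAM_LATENCY_MS = 500;

// tBufferSec: audio buffer available downstream, in seconds.
// predIntervalSec: delay between two cache slots, in seconds (0 if unknown).
// Returns the setTimeout delay in milliseconds.
function postProcessingDelayMs(tBufferSec, predIntervalSec) {
  // A cache item is only broadcast when the next one starts, so the slot
  // interval is subtracted from the available buffer time.
  return (tBufferSec - predIntervalSec) * 1000 - DOWNSTREAM_LATENCY_MS;
}

// Example: 4 s of buffer, 1 s between slots => fire 2500 ms from now.
setTimeout(() => {
  // _postProcessing(now) would run here in the real PostProcessor.
  console.log('post-processing slot');
}, postProcessingDelayMs(4, 1));
```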

View File

@ -70,7 +70,7 @@ function send(msg) {
(async function() {
const handler = tf.io.fileSystem(process.env.modelFile); // see https://stackoverflow.com/a/53766926/5317732
model = await tf.loadModel(handler);
model = await tf.loadLayersModel(handler);
// load model from remote file
//const path = 'https://www.adblockradio.com/models/' + canonical + '/model.json';
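For context, a minimal sketch of the renamed model-loading call. The `@tensorflow/tfjs-node` import, the `loadLocalModel` helper, and the example path are assumptions for illustration; only the `tf.io.fileSystem` handler and the `loadModel` → `loadLayersModel` rename come from the diff above.

```js
// Sketch: loading a converted Keras model from disk with a recent tfjs,
// where tf.loadModel() has been replaced by tf.loadLayersModel().
const tf = require('@tensorflow/tfjs-node');

async function loadLocalModel(modelJsonPath) {
  // tf.io.fileSystem() builds an IOHandler for a local model.json file.
  const handler = tf.io.fileSystem(modelJsonPath);
  return await tf.loadLayersModel(handler);
}

// Example (hypothetical path):
// const model = await loadLocalModel('./model/model.json');
```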

View File

@ -154,7 +154,8 @@ class Predictor {
try {
self.listener.write(Object.assign(dataObj, {
type: "audio",
metadataPath: self.dbs.metadataPath
metadataPath: self.dbs.metadataPath,
predInterval: self.config.predInterval,
}));
} catch (e) {
log.warn("could not write to listener. err=" + e);