/**
 * List the models in your project.
 *
 * @param {string=} filter An optional filter string to limit your results. See
 * https://firebase.google.com/docs/ml-kit/manage-hosted-models#list_your_projects_models
 */
const listModels = async (filter = null) => {
  // Only include a filter in the request when the caller supplied one.
  const listOptions = filter == null ? {} : { filter };

  // `listModels()` returns a page token when there are more pages of results
  // than one request can carry. Keep requesting with the returned token until
  // every page has been printed.
  let pageToken = null;
  while (true) {
    if (pageToken) {
      listOptions.pageToken = pageToken;
    }
    const response = await ml.listModels(listOptions);
    pageToken = response.pageToken;
    for (const model of response.models) {
      const tags = model.tags == null ? '' : model.tags.join(", ");
      console.log(`${model.displayName}\t\t${model.modelId}\t ${tags}`);
    }
    if (pageToken == null) {
      break;
    }
  }
}
/**
 * Upload a TensorFlow Lite model to your project and publish it.
 *
 * @param {string} tflite Path to the tflite file you want to upload.
 * @param {string} displayName A name to identify the model in your Firebase
 * project. This is the name you use from your app to load the model.
 * @param {?Array<string>=} tags Optional tags to help with model management.
 */
const uploadModel = async (tflite, displayName, tags = null) => {
  console.log('Uploading model to Cloud Storage...');

  // Push the local file to the storage bucket and derive its gs:// URI from
  // the returned upload metadata.
  const [file] = await storageBucket.upload(tflite);
  const { bucket, name } = file.metadata;
  const gcsUri = `gs://${bucket}/${name}`;

  // Describe the model; tags are attached only when the caller provided them.
  const modelSpec = {
    displayName,
    tfliteModel: { gcsTfliteUri: gcsUri },
  };
  if (tags != null) {
    modelSpec.tags = tags;
  }

  // Create the model record, then publish it so client apps can load it.
  const model = await ml.createModel(modelSpec);
  await ml.publishModel(model.modelId);

  const tagList = model.tags == null ? '' : model.tags.join(", ");
  console.log('Model uploaded and published:');
  console.log(`${model.displayName}\t\t${model.modelId}\t ${tagList}`);
}
// NOTE(review): this is a byte-for-byte duplicate of the `listModels` defined
// earlier in this file. If both lines live in the same module scope,
// redeclaring `const listModels` is a SyntaxError — confirm whether this is an
// accidental copy-paste and remove one of the two declarations.
/** * List the models in your project. * * @param {string=} filter An optional filter string to limit your results. See * https://firebase.google.com/docs/ml-kit/manage-hosted-models#list_your_projects_models */ const listModels = async (filter = null) => { let listOptions = filter == null ? {} : { filter: filter }; let models; // `listModels()` returns a page token if there are additional pages of // results that weren't returned by the request. We'll keep making requests // with the tokens until we've printed everything. let pageToken = null; do { if (pageToken) listOptions.pageToken = pageToken; ({models, pageToken} = await ml.listModels(listOptions)); for (const model of models) { const tags = model.tags == null ? '' : model.tags.join(", "); console.log(`${model.displayName}\t\t${model.modelId}\t ${tags}`); } } while (pageToken != null); }
// NOTE(review): this is a byte-for-byte duplicate of the `uploadModel` defined
// earlier in this file. If both lines live in the same module scope,
// redeclaring `const uploadModel` is a SyntaxError — confirm whether this is
// an accidental copy-paste and remove one of the two declarations.
/** * Upload a TensorFlow Lite model to your project and publish it. * * @param {string} tflite Path to the tflite file you want to upload. * @param {string} displayName A name to identify the model in your Firebase * project. This is the name you use from your app to load the model. * @param {?Array<string>=} tags Optional tags to help with model management. */ const uploadModel = async (tflite, displayName, tags = null) => { console.log('Uploading model to Cloud Storage...'); const files = await storageBucket.upload(tflite); const bucket = files[0].metadata.bucket; const name = files[0].metadata.name; const gcsUri = `gs://${bucket}/${name}`; const modelSpec = { displayName: displayName, tfliteModel: { gcsTfliteUri: gcsUri }, }; if (tags != null) { modelSpec.tags = tags; } const model = await ml.createModel(modelSpec); await ml.publishModel(model.modelId); const tagList = model.tags == null ? '' : model.tags.join(", "); console.log('Model uploaded and published:'); console.log(`${model.displayName}\t\t${model.modelId}\t ${tagList}`); }