I have trained a Keras model with 6 classes for CNN image recognition. Then I converted it to TensorFlow.js using this command:
tensorflowjs_converter --quantization_bytes 1 --input_format=keras {saved_model_path} {target_path}
which gives me two files (model.json, group1-shard1of1.bin). After adding them to my React Native project and testing the prediction function, it always returns the same result. I am converting my image to a tensor based on this GitHub thread:
// imports used below (FileSystem here is Expo's expo-file-system)
import * as tf from '@tensorflow/tfjs';
import { decodeJpeg } from '@tensorflow/tfjs-react-native';
import * as FileSystem from 'expo-file-system';

const processImagePrediction = async (base64Image: any) => {
  // read the captured image back as a base64 string
  const fileUri = base64Image.uri;
  const imgB64 = await FileSystem.readAsStringAsync(fileUri, {
    encoding: FileSystem.EncodingType.Base64,
  });

  // decode the base64 string into raw bytes, then into an image tensor
  const imgBuffer = tf.util.encodeString(imgB64, 'base64').buffer;
  const raw = new Uint8Array(imgBuffer);
  let imageTensor = decodeJpeg(raw);

  // resize to the model's input size and scale pixel values to [0, 1]
  const scalar = tf.scalar(255);
  imageTensor = tf.image.resizeNearestNeighbor(imageTensor, [224, 224]);
  const tensorScaled = imageTensor.div(scalar);

  // reshape to a batch of one, matching the model input (1, 224, 224, 3)
  const img = tf.reshape(tensorScaled, [1, 224, 224, 3]);
  console.log("IMG: ", img);

  const results = model.predict(img);
  let pred = results.dataSync();
  console.log("PREDICTION: ", pred);
}
I was expecting different results, but no matter what image I capture it always returns this:
LOG  IMG: {"dataId": {"id": 23}, "dtype": "float32", "id": 38, "isDisposedInternal": false, "kept": false, "rankType": "4", "scopeId": 6, "shape": [1, 224, 224, 3], "size": 150528, "strides": [150528, 672, 3]}
LOG  PREDICTION: [-7.7249579429626465, 4.73449182510376, 2.609705924987793, 20.0458927154541, -7.944214344024658, -18.101320266723633]
The numbers change slightly between images, but the highest score always stays on the same class (class number 4).
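"Highest prediction" here means the index of the largest score in pred. A minimal sketch of reading it off (assuming the model outputs one score per class, in index order):

// pred is the Float32Array logged above
const scores = Array.from(pred);
const topClass = scores.indexOf(Math.max(...scores));   // index of the largest score
console.log("Predicted class index:", topClass);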
P.S. I'm new to image recognition, so any tips would be appreciated.
In my class definition I have:
colours = {
    None: None,
    "Red": "255,0,0",
    "Green": "0, 255, 0",
    "Blue": "0, 0, 255",
}
self.colourComboTop.addItems(colours)
In my code:
def some_function(self):
    self.colourComboTop.currentIndex()    # gets the index: 0, 1, 2, 3
    self.colourComboTop.itemText(self.colourComboTop.currentIndex())    # gets the displayed text
How do I access the value associated with the key? So with {"Red": "255,0,0"}, I want the RGB value as a string.
Either set the RGB value as the userData argument of addItem:

for key, value in colours.items():
    self.colourComboTop.addItem(key, value)

key = self.colourComboTop.currentText()
rgb = self.colourComboTop.currentData()
Or keep a reference to the dict and map the selected text through it:

self.colours = {
    None: None,
    "Red": "255,0,0",
    "Green": "0, 255, 0",
    "Blue": "0, 0, 255",
}
self.colourComboTop.addItems(self.colours)
key = self.colourComboTop.currentText()
rgb = self.colours[key]
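If it helps, here is a minimal self-contained sketch of the userData approach (PyQt5 assumed; colourComboTop is reduced to a bare combo box, the rest of the app code is illustrative, and the None placeholder entry is left out since addItem() needs a string):

import sys
from PyQt5.QtWidgets import QApplication, QComboBox

app = QApplication(sys.argv)

colours = {
    "Red": "255,0,0",
    "Green": "0, 255, 0",
    "Blue": "0, 0, 255",
}

combo = QComboBox()
for key, value in colours.items():
    combo.addItem(key, value)            # displayed text, RGB string stored as userData

def show_selection(index):
    # itemData() returns whatever was stored as userData for that row
    print(combo.itemText(index), "->", combo.itemData(index))

combo.currentIndexChanged.connect(show_selection)
combo.show()
sys.exit(app.exec_())

Selecting "Green" prints Green -> 0, 255, 0.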
I've successfully built and displayed a custom geometry (a cube) in SceneKit from this post
Custom Geometry
Now I want to set each face of the cube to a different color. I found this post that is supposed to do that:
Per Vertex Color
Unfortunately, I can't seem to set the color of a face of the cube to a specific color. For example, if I set the color of all vertices to SCNVector3(x:1, y:0, z:0), I would expect all faces to be red, but instead they're all green. Setting the colors to SCNVector3(x:0, y:1, z:0) turns the faces black. Here's the relevant code:
let colors = [SCNVector3](count:vertices.count, repeatedValue:SCNVector3(x:1, y:0, z:0))
let colorData = NSData(bytes: colors, length: sizeof(SCNVector3) * colors.count)
let colorSource = SCNGeometrySource(data: colorData, semantic: SCNGeometrySourceSemanticColor, vectorCount: colors.count, floatComponents: true, componentsPerVector: 3, bytesPerComponent: sizeof(Float), dataOffset: 0, dataStride: sizeof(SCNVector3))
let geometry = SCNGeometry(sources: [colorSource, vertexSource, normalSource], elements: [element])
// Create a node and assign our custom geometry
let node = SCNNode()
let material = SCNMaterial()
material.diffuse.contents = NSColor.whiteColor()
geometry.materials = [material]
node.geometry = geometry
Does anyone know why it's not working?
You have to create one material per face, each with its own color, and build the cube geometry from six elements (one per face), since SceneKit pairs materials with geometry elements in order.
For example:
let material1 = SCNMaterial()
material1.diffuse.contents = NSColor.whiteColor()
let material2 = SCNMaterial()
material2.diffuse.contents = NSColor.greenColor()
let material3 = SCNMaterial()
material3.diffuse.contents = NSColor.redColor()
let material4 = SCNMaterial()
material4.diffuse.contents = NSColor.blackColor()
let material5 = SCNMaterial()
material5.diffuse.contents = NSColor.blueColor()
let material6 = SCNMaterial()
material6.diffuse.contents = NSColor.whiteColor()
geometry.materials = [material1,material2,material3,material4,material5,material6]
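A rough sketch of the per-face-element part (modern Swift syntax; vertexSource and normalSource come from your existing cube code, and faceIndices is assumed to be an array of six [Int32] index lists, one per face):

// one geometry element per face, built from that face's triangle indices
let elements = faceIndices.map { indices -> SCNGeometryElement in
    let indexData = Data(bytes: indices, count: indices.count * MemoryLayout<Int32>.size)
    return SCNGeometryElement(data: indexData,
                              primitiveType: .triangles,
                              primitiveCount: indices.count / 3,
                              bytesPerIndex: MemoryLayout<Int32>.size)
}

let geometry = SCNGeometry(sources: [vertexSource, normalSource], elements: elements)

// materials are matched to elements in order, so material i colors face i
let faceColors: [NSColor] = [.white, .green, .red, .black, .blue, .white]
geometry.materials = faceColors.map { color -> SCNMaterial in
    let material = SCNMaterial()
    material.diffuse.contents = color
    return material
}

With one element per face you don't need the per-vertex color source at all for flat face colors.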
I couldn't find an answer to my question: can you have an array as a value in a Lua table?
local colors = {"blue" = {0,0,1,1}, "green" = {0,1,0,1}, "red" = {1,0,0,1} , "orange" = {0.5, 0, 0.5, 1}, "black" = {0,0,0,1}, "gold" = {1, 215/255, 0, 1}}
I get this error on that line when using the Corona SDK:
'}' expected near '='
It's tables all the way down :-) Yes, tables (including ones indexed like arrays) can be elements of tables in Lua. This chapter in the Lua manual explains the different ways of defining the elements of a table.
In your example, the quotation marks around the keys are what trigger the error: inside a table constructor, a string key is written either as a bare name (blue = ...) or in brackets (["blue"] = ...), not as a quoted name before =.
local colors = { blue = { 0, 1, ...}, green = { ... }, ... }
Or you can do:
local colors = {}
colors[ "blue" ] = { 0, ... }
colors[ "green" ] = ...
Or
colors.blue = { 0, .... }
colors.green = ....
The colors.blue form is just syntactic sugar for indexing with the string key, colors["blue"].
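And a small usage sketch once the constructor is fixed (plain Lua 5.1, which Corona uses; the display.newRect/setFillColor lines assume a Corona display object):

local colors = {
    blue = { 0, 0, 1, 1 },
    red  = { 1, 0, 0, 1 },
    gold = { 1, 215/255, 0, 1 },
}

print(colors.red[1], colors["red"][2])      -- dot and bracket indexing are interchangeable

local rect = display.newRect(100, 100, 50, 50)
rect:setFillColor(unpack(colors.blue))      -- unpack spreads {r, g, b, a} into four arguments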
I am using Tiled to make tile maps.
I save them as .txt, and level one is as follows:
[header]
width=14
height=8
tilewidth=120
tileheight=120
[tilesets]
tileset=Tileset.png,120,120,0,0
[layer]
type=Tile Layer 1
data=
2,2,2,2,2,2,2,2,2,2,2,2,2,2,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
3,1,1,1,1,1,1,1,1,1,1,1,1,3,
2,2,2,2,2,2,2,2,2,2,2,2,2,2
How would I make the data turn into something like:
mapArr = [
[ 0, 0, 0, 0, 0, 0, 0, 0],
[ 0,101,101,101,101,101,101, 0],
[ 0,101, 0,101,101,101,101, 0],
[ 0,101,101,101,101, 0,101, 0],
[ 0,101,101,101,101,101,102, 0],
[ 0, 0, 0, 0, 0, 0, 0, 0]
];
Thank you in advance
This is a very basic example. It makes some assumptions about how the input text is formatted, but it should get you started.
import flash.events.Event;
import flash.net.URLLoader;
import flash.net.URLRequest;

// create a loader and a request object for the data file
var loader:URLLoader = new URLLoader();
loader.addEventListener(Event.COMPLETE, loaderCompleteHandler, false, 0, true);
var request:URLRequest = new URLRequest("level1.txt");

// begin the load
try {
    loader.load(request);
} catch (error:Error) {
    trace("Unable to load requested document.");
}

// on load complete...
function loaderCompleteHandler(event:Event):void
{
    loader.removeEventListener(Event.COMPLETE, loaderCompleteHandler);
    parse(loader.data);
}

// parse the data file into a 2D array of rows
function parse(data:String):Array
{
    var lines:Array = data.split('\n');    // split the data file into lines
    var counter:int = lines.length - 1;
    var map:Array = new Array();

    // start from the last line and keep adding rows until the 'data=' line is found
    while (lines[counter] != "data=" && counter > 0)
    {
        var row:Array = lines[counter].split(',');
        if (row[row.length - 1] == "")
        {
            row.pop();    // gets rid of the trailing comma for all but the last row
        }
        map.unshift(row);    // prepend, so rows end up in file order
        counter--;
    }

    // just to see how it looks
    for (var key in map)
    {
        trace("\n" + map[key]);
    }
    return map;
}
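One thing to keep in mind: split() gives you Strings, so every entry in map is "2" rather than 2. If you want numeric tiles like in your mapArr example, a small (untested) addition inside the while loop, right after the row is split, converts them:

// convert each value in the row from String to int before storing it
for (var i:int = 0; i < row.length; i++)
{
    row[i] = int(row[i]);
}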