Does naming of the ops impact the memory/compute performance of TensorFlow? - tensorflow

To make the question clear, let's use an example. Assume that we pass a huge tensor through a series of operations (reshape, transpose, etc.). Is it more memory/compute efficient to keep reusing the same variable name, or does it not matter? See the two cases below:
Case 1: change name
x = Conv2d(...)
x_transposed = tf.transpose(x)
x_expanded = tf.expand_dims(x_transposed, -1)
x_reshaped = tf.reshape(x_expanded, [...])
Case 2: keep names
x = Conv2d(...)
x = tf.transpose(x)
x = tf.expand_dims(x, -1)
x = tf.reshape(x, [...])

By converting the provided snippet into two different Python functions, wrapping each with tf.function to compile it into a callable TensorFlow graph, and printing the concrete function's graph, it can be seen that the two graphs are identical. This indicates that the Python variable names used do not make a difference when constructing the graph. The example below (tweaked slightly from the provided snippet) illustrates this:
import tensorflow as tf

def same_name():
    x = tf.convert_to_tensor([1, 2, 3], dtype=tf.float32)
    x = tf.transpose(x)
    x = tf.expand_dims(x, -1)
    x = tf.reshape(x, [3, 1])
    x = tf.nn.relu(x)

def diff_name():
    x = tf.convert_to_tensor([1, 2, 3], dtype=tf.float32)
    x_transposed = tf.transpose(x)
    x_expanded = tf.expand_dims(x_transposed, -1)
    x_reshaped = tf.reshape(x_expanded, [3, 1])
    x_relued = tf.nn.relu(x_reshaped)

if __name__ == "__main__":
    print(tf.function(same_name).get_concrete_function().graph.as_graph_def())
    print(tf.function(diff_name).get_concrete_function().graph.as_graph_def())
The output in both cases is:
node {
  name: "Const"
  op: "Const"
  attr {
    key: "dtype"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "value"
    value {
      tensor {
        dtype: DT_FLOAT
        tensor_shape {
          dim {
            size: 3
          }
        }
        tensor_content: "\000\000\200?\000\000\000@\000\000@@"
      }
    }
  }
}
node {
  name: "transpose/perm"
  op: "Const"
  attr {
    key: "dtype"
    value {
      type: DT_INT32
    }
  }
  attr {
    key: "value"
    value {
      tensor {
        dtype: DT_INT32
        tensor_shape {
          dim {
            size: 1
          }
        }
        int_val: 0
      }
    }
  }
}
node {
  name: "transpose"
  op: "Transpose"
  input: "Const"
  input: "transpose/perm"
  attr {
    key: "T"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "Tperm"
    value {
      type: DT_INT32
    }
  }
}
node {
  name: "ExpandDims/dim"
  op: "Const"
  attr {
    key: "dtype"
    value {
      type: DT_INT32
    }
  }
  attr {
    key: "value"
    value {
      tensor {
        dtype: DT_INT32
        tensor_shape {
        }
        int_val: -1
      }
    }
  }
}
node {
  name: "ExpandDims"
  op: "ExpandDims"
  input: "transpose"
  input: "ExpandDims/dim"
  attr {
    key: "T"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "Tdim"
    value {
      type: DT_INT32
    }
  }
}
node {
  name: "Reshape/shape"
  op: "Const"
  attr {
    key: "dtype"
    value {
      type: DT_INT32
    }
  }
  attr {
    key: "value"
    value {
      tensor {
        dtype: DT_INT32
        tensor_shape {
          dim {
            size: 2
          }
        }
        tensor_content: "\003\000\000\000\001\000\000\000"
      }
    }
  }
}
node {
  name: "Reshape"
  op: "Reshape"
  input: "ExpandDims"
  input: "Reshape/shape"
  attr {
    key: "T"
    value {
      type: DT_FLOAT
    }
  }
  attr {
    key: "Tshape"
    value {
      type: DT_INT32
    }
  }
}
node {
  name: "Relu"
  op: "Relu"
  input: "Reshape"
  attr {
    key: "T"
    value {
      type: DT_FLOAT
    }
  }
}
versions {
  producer: 440
}
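Rather than eyeballing the two printouts, a quick sketch (reusing the same_name and diff_name functions defined above) is to compare the graph defs' text representations directly; this should print True if the two graphs are indeed identical:

graph_def_same = tf.function(same_name).get_concrete_function().graph.as_graph_def()
graph_def_diff = tf.function(diff_name).get_concrete_function().graph.as_graph_def()

# Compare the text form of the two GraphDef protos instead of reading them by hand
print(str(graph_def_same) == str(graph_def_diff))  # True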

Related

ReferenceError: halalMeatInstance is not defined

I was writing a test in Truffle, but I encountered this error: "ReferenceError: halalMeatInstance is not defined". I am pretty new to Truffle testing; this is actually my first time playing around with smart contracts and Truffle tests. It would be really helpful if anyone could point out the mistake I made and suggest a solution. Thank you so much! The test file code is added below.
const { expectedEvent, BN } = require("@openzeppelin/test-helpers");
const HDWalletProvider = require("@truffle/hdwallet-provider");
const Web3 = require("Web3");
const HalalMeat = artifacts.require("HalalMeat");

contract("HalalMeat", (accounts) => {
  before(async () => {
    this.owner = accounts[0];
    this.MEAT_BRANDS = {
      GHagro: "Golden Harvest Agro Industries",
      Kazifood: "Kazi Food Industries",
      AGagro: "Ahsan Group Agro Foods",
      Bengalmeat: "Bengal Meat ltd",
      Sagro: "Sadek Agro ltd"
    };
    // enums
    this.ModeEnums = {
      ISSUER: { val: "ISSUER", pos: 0 },
      VERIFIER: { val: "VERIFIER", pos: 1 },
      PROVER: { val: "PROVER", pos: 2 }
    };
    this.StatusEnums = {
      // MANUFACTURED, DELIVERING_INTERNATIONAL, STORED, DELIVERING_LOCAL, DELIVERED
      manufactured: { val: "IMANUFACTURED", pos: 0 },
      delivering1: { val: "DELIVERING_INTERNATIONAL", pos: 1 },
      stored: { val: "STORED", pos: 2 },
      delivering2: { val: "DELIVERING_LOCAL", pos: 3 },
      delivered: { val: "DELIVERED", pos: 4 }
    };
    this.defaultEntities = {
      farmA: { id: accounts[1], mode: this.ModeEnums.PROVER.val },
      farmB: { id: accounts[2], mode: this.ModeEnums.PROVER.val },
      inspector: { id: accounts[3], mode: this.ModeEnums.ISSUER.val },
      distributorGlobal: { id: accounts[4], mode: this.ModeEnums.VERIFIER.val },
      distributorLocal: { id: accounts[5], mode: this.ModeEnums.VERIFIER.val },
      seller: { id: accounts[6], mode: this.ModeEnums.ISSUER.val },
      customer: { id: accounts[7], mode: this.ModeEnums.PROVER.val }
    };
    this.defaultMeatBatches = {
      0: { brand: this.MEAT_BRANDS.Bengalmeat, farm: this.defaultEntities.farmA.id },
      1: { brand: this.MEAT_BRANDS.Sagro, farm: this.defaultEntities.farmA.id },
      2: { brand: this.MEAT_BRANDS.Kazifood, farm: this.defaultEntities.farmA.id },
      3: { brand: this.MEAT_BRANDS.GHagro, farm: this.defaultEntities.farmA.id },
      4: { brand: this.MEAT_BRANDS.AGagro, farm: this.defaultEntities.farmA.id },
      5: { brand: this.MEAT_BRANDS.Bengalmeat, farm: this.defaultEntities.farmB.id },
      6: { brand: this.MEAT_BRANDS.Sagro, farm: this.defaultEntities.farmB.id },
      7: { brand: this.MEAT_BRANDS.Kazifood, farm: this.defaultEntities.farmB.id },
      8: { brand: this.MEAT_BRANDS.GHagro, farm: this.defaultEntities.farmB.id },
      9: { brand: this.MEAT_BRANDS.AGagro, farm: this.defaultEntities.farmB.id }
    };
    this.halalMeatInstance = await HalalMeat.deployed();
    this.providerOrUrl = "http://localhost:8545";
  });

  it('should add entities successfully', async () => {
    for (const entity in this.defaultEntities) {
      const { id, mode } = this.defaultEntities[entity];
      const result = await halalMeatInstance.addEntity(
        id,
        mode,
        { from: this.owner }
      );
      console.log(result);
      expectedEvent(result.receipt, "AddEntity", {
        entityId: id,
        entityMode: mode
      });
      // assert.equal(actual, expected, errorMessage);
    }
  });
});
I ran truffle test and got the reference error, but I was expecting an event to be emitted.
Contract: HalalMeat
  1) should add entities successfully
    > No events were emitted

0 passing (99ms)
1 failing

Contract: HalalMeat
  should add entities successfully:
    ReferenceError: halalMeatInstance is not defined
      at Context.<anonymous> (test\halalmeat.js:74:22)
      at processTicksAndRejections (node:internal/process/task_queues:95:5)
This is the error message I got when I ran truffle test.
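Judging from the snippet above, one likely cause of the ReferenceError is that the deployed instance is stored as this.halalMeatInstance in the before() hook but referenced as a bare halalMeatInstance inside the it() block. Since both callbacks are arrow functions they share the same lexical this, so a minimal sketch of the adjustment would be:

it('should add entities successfully', async () => {
  for (const entity in this.defaultEntities) {
    const { id, mode } = this.defaultEntities[entity];
    // Use the instance stored on `this` in before(), not the undefined bare identifier
    const result = await this.halalMeatInstance.addEntity(
      id,
      mode,
      { from: this.owner }
    );
    expectedEvent(result.receipt, "AddEntity", {
      entityId: id,
      entityMode: mode
    });
  }
});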

How can I configure the model to detect (the same) keypoints for multiple classes?

I want to do keypoint estimation, but rather than just having class "person" I have multiple person classes such as "person_standing", "person_sitting", etc. I can't figure out what my label_map.pbtxt and pipeline.config files should look like, despite trying multiple approaches.
For example, I tried making a single "person" class, and then making different classes as descendants, but the model wasn't detecting keypoints for those and the keypoint losses were 0:
Label map:
item {
  name: "/m/01g317"
  id: 1
  display_name: "person"
  keypoints {
    id: 0
    label: "nose"
  }
  ...
  keypoints {
    id: 16
    label: "right_ankle"
  }
  descendant_ids: 2
  descendant_ids: 3
  descendant_ids: 4
}
item {
  name: "person_standing"
  id: 2
  keypoints {
    id: 0
    label: "nose"
  }
  ...
  keypoints {
    id: 16
    label: "right_ankle"
  }
  ancestor_ids: 1
}
item {
  name: "person_sitting"
  id: 3
  keypoints {
    id: 0
    label: "nose"
  }
  ...
  keypoints {
    id: 16
    label: "right_ankle"
  }
  ancestor_ids: 1
}
item {
  name: "person_lying_down"
  id: 4
  keypoints {
    id: 0
    label: "nose"
  }
  ...
  keypoints {
    id: 16
    label: "right_ankle"
  }
  ancestor_ids: 1
}
Relevant pipeline.config section:
keypoint_estimation_task {
  task_name: "human_pose"
  task_loss_weight: 1.0
  loss {
    localization_loss {
      l1_localization_loss {
      }
    }
    classification_loss {
      penalty_reduced_logistic_focal_loss {
        alpha: 2.0
        beta: 4.0
      }
    }
  }
  keypoint_class_name: "/m/01g317"
  keypoint_label_to_std {
    key: "left_ankle"
    value: 0.89
  }
  ...
  keypoint_label_to_std {
    key: "right_wrist"
    value: 0.62
  }
  keypoint_regression_loss_weight: 0.1
  keypoint_heatmap_loss_weight: 1.0
  keypoint_offset_loss_weight: 1.0
  offset_peak_radius: 3
  per_keypoint_offset: true
}
I also tried creating an individual keypoint_estimation_task for each class (although I suspected that would increase the inference time by a lot), but apparently using the same keypoint keys in different classes raises an error.
Any help?

Mongodb query optimization for where clause

I am using MongoDB version 3.2.8. I am executing db.Member.find({$where: "var d = new Date(this.Birthdate); return d.getUTCDate() === 4 && d.getUTCMonth() === 2 && d.getUTCFullYear() !== 2017" }). This query takes too long to execute on my local MongoDB instance. Is there an alternative form of this query that can be optimized?
You can try the MongoDB Aggregation Framework. I tested this using the Mingo library for JavaScript.
Example:
var mingo = require('mingo')

var data = [{
  _id: 100,
  Birthdate: new Date("1995-02-04")
}]

var pipeline = [
  {
    $project: { M: { $month: "$Birthdate" }, Y: { $year: "$Birthdate" }, D: { $dayOfMonth: "$Birthdate" }, Birthdate: 1 }
  },
  {
    $match: { $and: [ { D: 4 }, { M: 2 }, { Y: { $ne: 2017 } } ] }
  },
  {
    $project: { M: 0, D: 0, Y: 0 }
  }
]

var result = mingo.aggregate(data, pipeline)
console.log(result)
// Output
// [ { Birthdate: 1995-02-04T00:00:00.000Z, _id: 100 } ]
For MongoDB:
db.Member.aggregate(pipeline)
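For reference, the same pipeline can also be written out inline in the mongo shell (a sketch assuming Birthdate is stored as a BSON date). One caveat: getUTCMonth() in the original $where is zero-based (2 means March), while the aggregation $month operator is one-based, so adjust the month value if you need an exact translation of the original query:

db.Member.aggregate([
  // Project the date parts needed for matching, keeping Birthdate
  { $project: { M: { $month: "$Birthdate" }, Y: { $year: "$Birthdate" }, D: { $dayOfMonth: "$Birthdate" }, Birthdate: 1 } },
  // Keep documents with day 4, month 2 (February here), and any year other than 2017
  { $match: { $and: [ { D: 4 }, { M: 2 }, { Y: { $ne: 2017 } } ] } },
  // Drop the helper fields from the result
  { $project: { M: 0, D: 0, Y: 0 } }
])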

Lodash: What's the opposite of `_.uniq()`?

The _.uniq() in lodash removes duplicates from an array:
var tst = [
  { "topicId": 1, "subTopicId": 1, "topicName": "a", "subTopicName1": "w" },
  { "topicId": 2, "subTopicId": 2, "topicName": "b", "subTopicName2": "x" },
  { "topicId": 3, "subTopicId": 3, "topicName": "c", "subTopicName3": "y" },
  { "topicId": 1, "subTopicId": 4, "topicName": "c", "subTopicName4": "z" }
]
var t = _.uniq(tst, 'topicName')
This returns:
[ {"topicId":1,"subTopicId":1,"topicName":"a","subTopicName1":"w" },
{ topicId: 2, subTopicId: 2, topicName: 'b', subTopicName2: 'x' },
{ topicId: 3, subTopicId: 3, topicName: 'c', subTopicName3: 'y' } ]
What's the opposite of this? It should return only a single object for each set of duplicates:
[ { topicId: 3, subTopicId: 3, topicName: 'c', subTopicName3: 'y' } ]
I don't think there's a built-in method; here's something that should do the job:
function dupesOnly(arr, field) {
  var seen = {},
      ret = [];

  arr.forEach(function(item) {
    var key = item[field],
        val = seen[key];

    if (!val) {
      seen[key] = val = {
        initial: item,
        count: 0
      };
    }

    if (val.count === 1) {
      ret.push(val.initial);
    }

    ++val.count;
  });

  return ret;
}
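Applied to the tst array from the question with 'topicName' as the field, this returns the single duplicated entry:

var dupes = dupesOnly(tst, 'topicName');
console.log(dupes);
// [ { topicId: 3, subTopicId: 3, topicName: 'c', subTopicName3: 'y' } ]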

Asserting all array elements are objects

How can I assert that all elements of a list are objects?
should.exist(list)
list.should.be.an('array')
... // ?
If we are talking about should.js, you can use the following approach:
> var should = require('./');
> var list1 = [1, 2, 3];
> var list2 = [1, { a: 10}, 3];
> var list3 = [{ a: 11}, {b: 10}];
> list1.should.matchEvery(function(it) { return it.should.be.an.Object(); });
AssertionError: expected Array [ 1, 2, 3 ] to match each Function { name: '' }
expected 1 to match Function { name: '' }
expected 1 to be an object
expected 1 to have type object
expected 'number' to be 'object'
at Assertion.fail (/Users/den/Projects/shouldjs/should.js/lib/assertion.js:180:17)
at Assertion.prop.value (/Users/den/Projects/shouldjs/should.js/lib/assertion.js:65:17)
...
> list2.should.matchEvery(function(it) { return it.should.be.an.Object(); });
AssertionError: expected Array [ 1, Object { a: 10 }, 3 ] to match each Function { name: '' }
expected 1 to match Function { name: '' }
expected 1 to be an object
expected 1 to have type object
expected 'number' to be 'object'
at Assertion.fail (/Users/den/Projects/shouldjs/should.js/lib/assertion.js:180:17)
at Assertion.prop.value (/Users/den/Projects/shouldjs/should.js/lib/assertion.js:65:17)
...
> list3.should.matchEvery(function(it) { return it.should.be.an.Object(); });
{ obj: [ { a: 11 }, { b: 10 } ],
anyOne: false,
negate: false,
params:
{ operator: 'to match each Function { name: \'\' }',
message: undefined } }
>
So I used .matchEvery and checked that each element is an object. You can refer to the API docs for more examples.
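If you'd rather not use .matchEvery, a simple alternative (a sketch using the same should.js assertions shown above) is to assert the array once and then iterate over its elements:

var should = require('should');

var list = [{ a: 11 }, { b: 10 }];

should.exist(list);
list.should.be.an.Array();
// Assert each element individually; any non-object element will throw an AssertionError
list.forEach(function (item) {
  item.should.be.an.Object();
});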