Filter time range in Mongodb - sql

I want to get all the documents that fall within a particular TIME in MongoDB. I have searched for this, but I can only find solutions for documents that fall within a particular DATE.
For example, the SQL expression of what I am looking for would be: WHERE YEAR(date_time) = 2019 AND TIME(date_time) BETWEEN '07:30' AND '08:30'

Demo - https://mongoplayground.net/p/Ny5FCEiQkE7
Use $expr
db.collection.aggregate([{
$match: {
$expr: { $eq: [ { $year: "$dt" }, 2021 ] }, // match year
$or: [ // or query
{ $and: [ // match hour 7 and minutes 30+
{ $expr: { $eq: [ { "$hour": "$dt" }, 7 ] } },
{ $expr: { $gte: [ { "$minute": "$dt" }, 30 ] } }
]
},
{ $and: [ // match hour 8 and minutes less than 30
{ $expr: { $eq: [ { "$hour": "$dt" }, 8 ] } },
{ $expr: { $lt: [ { "$minute": "$dt" }, 30 ] } }
]
},
]
}
}])

You can use this one:
db.collection.aggregate([
{
$addFields: {
parts: {
$dateToParts: {
date: "$date_time" // , timezone : <timezone> if needed
}
}
}
},
{ $set: { "parts.time": { $sum: [{ $multiply: ["$parts.hour", 60] }, "$parts.minute"] } } }, // hours → minutes, so multiply by 60
{
$match: {
"parts.year": 2019,
"parts.time": {
$gte: 450, // 07:30 → 7*60 + 30 minutes (aggregation operators are not evaluated inside query-context $gte/$lte, so use the precomputed constant)
$lte: 510  // 08:30 → 8*60 + 30 minutes
}
}
}
])
Another solution could be this one:
db.collection.aggregate([
{
$addFields: {
time: {
$dateFromParts: {
year: { $year: "$date_time" }, month: 1, day: 1,
hour: { $hour: "$date_time" }, minute: { $minute: "$date_time" }
// , timezone : <timezone> if needed
}
}
}
},
{
$match: {
time: {
$gte: ISODate("2019-01-01T07:30:00Z"),
$lte: ISODate("2019-01-01T08:30:00Z")
}
}
}
])

Related

How to group by in MongoDB Compass based on the time?

Recently I'm working with a Mongodb database. Here is the data model of the document I need to run a query on:
{
"creation_date": {
"$date": {
"$numberLong": "1641981205813"
}
},
"user_id": {
"$oid": "61dedd8b7a520461dd78016b"
},
"products": [
{
"_id": {
"$oid": "61dede397a520461dd7818bd"
},
"product_id": {
"$oid": "615071ae8b66e1e9a3d6ea50"
},
"payment": true,
"support_all_payment": false,
"term_ids": null
}
],
"carts_info": [
{
"_id": {
"$oid": "61dede397a520461dd7818be"
},
"support_type": null,
"support_price": 0,
"product_price": 11000,
"product_type": "all",
"final_price": 11000,
"product_id": {
"$oid": "615071ae8b66e1e9a3d6ea50"
}
}
],
"_des": "initial_payment",
"_type": "online",
"_token": "9e0cb4d111f642f1a6f482bb04f1f57b",
"_price": 11000,
"_status": "unpaid",
"_terminal_id": "12605682",
"__v": 0,
"additional_information": {
"saman_bank": {
"MID": "0",
"ResNum": "61dede387a520461dd7818bb",
"State": "CanceledByUser",
"TraceNo": "",
"Wage": "",
"Rrn": "",
"SecurePan": "",
"HashedCardNumber": "",
"Status": "1"
}
}
}
This collection is user orders. I need to count the orders for today. So, I need an equivalent query for MongoDB Compass, the same as this SQL:
SELECT count(1) num,
date(creation_date) date
FROM orders
WHERE date(creation_date) = "2023-02-16"
GROUP BY date
Any idea how can I run this logic on Mongodb Compass?
Use $dateTrunc to perform date only operations.
db.collection.aggregate([
{
"$match": {
$expr: {
$eq: [
{
$dateTrunc: {
date: "$creation_date",
unit: "day"
}
},
ISODate("2022-01-12")
]
}
}
},
{
$group: {
_id: {
$dateTrunc: {
date: "$creation_date",
unit: "day"
}
},
num: {
$sum: 1
}
}
}
])
Mongo Playground
For OP's MongoDB v3.6, we can use $dateToString to perform string comparison on a date-only string.
db.collection.aggregate([
{
$addFields: {
dateOnly: {
"$dateToString": {
"date": "$creation_date",
"format": "%Y-%m-%d"
}
}
}
},
{
$match: {
dateOnly: "2022-01-12"
}
},
{
$group: {
_id: null,
num: {
$sum: 1
}
}
}
])
Mongo Playground

MongoDB Lookup values based on dynamic field name

I'm pretty sure the below can be done, I'm struggling to understand how to do it in MongoDB.
My data is structured like this (demo data):
db={
"recipes": [
{
"id": 1,
"name": "flatbread pizza",
"ingredients": {
"1010": 1,
"1020": 2,
"1030": 200
}
},
{
"id": 2,
"name": "cheese sandwich",
"ingredients": {
"1040": 1,
"1050": 2
}
}
],
"ingredients": [
{
"id": 1010,
"name": "flatbread",
"unit": "pieces"
},
{
"id": 1020,
"name": "garlic",
"unit": "clove"
},
{
"id": 1030,
"name": "tomato sauce",
"unit": "ml"
},
{
"id": 1040,
"name": "bread",
"unit": "slices"
},
{
"id": 1050,
"name": "cheese",
"unit": "slices"
}
]
}
The output I'm trying to achieve would look like this:
[
{
"id": 1,
"name": "flatbread pizza",
"flatbread": "1 pieces",
"garlic": "2 cloves",
"tomato sauce": "200 ml"
},
{
"id": 2,
"name": "cheese sandwich",
"bread": "1 slices",
"cheese": "2 slices"
}
]
I've tried several approaches, and I get stuck at the bit where I need to do a lookup based on the ingredient name (which actually is the id). I tried using $objectToArray to turn it into a k-v document, but then I get stuck in how to construct the lookup pipeline.
This is not a simple solution, and probably can be improved:
db.recipes.aggregate([
{
"$addFields": {
ingredientsParts: {
"$objectToArray": "$ingredients"
}
}
},
{
$unwind: "$ingredientsParts"
},
{
"$group": {
_id: "$id",
name: {
$first: "$name"
},
ingredientsParts: {
$push: {
v: "$ingredientsParts.v",
id: {
$toInt: "$ingredientsParts.k"
}
}
}
}
},
{
"$lookup": {
"from": "ingredients",
"localField": "ingredientsParts.id",
"foreignField": "id",
"as": "ingredients"
}
},
{
$unwind: "$ingredients"
},
{
"$addFields": {
"ingredientsPart": {
"$filter": {
input: "$ingredientsParts",
as: "item",
cond: {
$eq: [
"$$item.id",
"$ingredients.id"
]
}
}
}
}
},
{
$project: {
ingredients: 1,
ingredientsPart: {
"$arrayElemAt": [
"$ingredientsPart",
0
]
},
name: 1
}
},
{
"$addFields": {
units: {
k: "$ingredients.name",
v: {
"$concat": [
{
$toString: "$ingredientsPart.v"
},
" ",
"$ingredients.unit"
]
}
}
}
},
{
$group: {
_id: "$_id",
name: {
$first: "$name"
},
units: {
$push: "$units"
}
}
},
{
"$addFields": {
"data": {
"$arrayToObject": "$units"
}
}
},
{
"$addFields": {
"data.id": "$_id",
"data.name": "$name"
}
},
{
"$replaceRoot": {
"newRoot": "$data"
}
}
])
You can see it works here
As rickhg12hs said, it can be modeled better.

How to add a field in the mongoDB query representing a total figure

I have a mongodb query
[
{$group: {
_id: '$status',
count: {$sum: NumberInt(1)}
}}
]
The query returns the following result:
[
{ _id: "A", count: 22 },
{ _id: "B", count: 33 },
{ _id: "C", count: 44 }
]
Based on the above, I need to add a new field "totalCount" (e.g. 22 + 33 + 44 = 99) as in the following:
[
{ _id: "A", count: 22, totalCount: 99 },
{ _id: "B", count: 33, totalCount: 99 },
{ _id: "C", count: 44, totalCount: 99 }
]
Any help or clue is highly appreciated.
You have to make use of $facet stage to create multiple instances of aggregation, calculate the total count in a different stage and finally merge into a single output.
db.collection.aggregate([
{
"$facet": {
"totalCount": [
{
"$group": {
"_id": null,
"totalCount": {
"$sum": "$count"
}
},
},
],
"root": [
{
"$replaceRoot": {
"newRoot": "$$ROOT"
}
},
]
}
},
{
"$unwind": "$root"
},
{
"$replaceRoot": {
"newRoot": {
"$mergeObjects": [
"$root",
{
"$arrayElemAt": [
"$totalCount",
0
]
}
],
}
}
},
])
Mongo Sample Execution

Need to get the following query to output correctly

Hi guys, I've been trying all day to construct this simple Mongo query but I can't get the desired output. Below is the query and the current output.
db.customers.aggregate(
{ $match : { "status" : "Closed" } },
{ $unwind: "$lines" },
{ $group : {
_id:{label: "$lines.label",date: {$substr: [ "$date", 0, 10 ]}},
values: { $push: { date: {$substr: [ "$date", 0, 10 ]}} },
count: { $sum: 1 }
}},
{ $project : {
_id : 1,
values:1,
count:1
}}
);
Which outputs the following.
{
"result": [
{
"_id": {
"label": "label",
"date": "2010-10-01"
},
"values": [
{
"date": "2010-10-01"
},
{
"date": "2010-10-01"
},
{
"date": "2010-10-01"
},
{
"date": "2010-10-01"
}
],
"count": 4
},
{
"_id": {
"label": "label",
"date": "2010-10-10"
},
"values": [
{
"date": "2010-10-10"
}
],
"count": 1
},
{
"_id": {
"label": "label",
"date": "2010-07-25"
},
"values": [
{
"date": "2010-07-25"
}
],
"count": 1
}
]
}
However the output below is the one that I'm looking for and just can't get. I can obviously get all the data I desire, just in the wrong places.
{
"result": [
{
"_id": "label",
"values": [
{
"date": "2010-11-27",
"count": 4
},
{
"date": "2010-10-10",
"count": 1
},
{
"date": "2010-07-25",
"count": 1
}
]
}
]
}
Like always thanks for the help and support.
Can you try this:
db.customers.aggregate([
{ $match : { "status" : "Closed" } },
{ $unwind: "$lines" },
{ $group : {
_id:{label: "$lines.label",date: {$substr: [ "$date", 0, 10 ]}},
values: { $push: { date: {$substr: [ "$date", 0, 10 ]}} },
count: { $sum: 1 }
}},
// This one I added to group them behind a single label in an array list
{ $group : {
_id:{
label: "$_id.label"
},
values : {
$push : { date : "$date", count : "$count" }
}
}
},
{ $project : {
_id : 1,
values:1,
count:1
}
}
]);
If I got your problem right, you'd like to group the counts + dates into a values array. You can do that with $push after the 1st group stage.

RabbitMQ won't start after power failure

I'm running a RabbitMQ server instance on my machine. But after a power failure in the building I can't get Rabbit to start.
There is no other service running on the same TCP-port and I've tried to reinstall both rabbit and erlang but nothing seems to work.
I'm running:
CentOS release 5.4 (Final)
Rabbit rabbitmq-server-2.8.2-1.noarch
erlang-R14B-03.3.el5.x86_64
The startup_error log is empty but the startup_log contains:
{
error_logger,
{
{2012,5,10},
{9,35,43}
},
"Protocol: ~p: register error: ~p~n",
[
"inet_tcp",
{
{
badmatch,
{
error,
econnrefused
}
},
[
{
inet_tcp_dist,
listen,
1
},
{
net_kernel,
start_protos,
4
},
{
net_kernel,
start_protos,
3
},
{
net_kernel,
init_node,
2
},
{
net_kernel,
init,
1
},
{
gen_server,
init_it,
6
},
{
proc_lib,
init_p_do_apply,
3
}
]
}
]
}{
error_logger,
{
{2012,5,10},
{9,35,43}
},
crash_report,
[
[
{
initial_call,
{
net_kernel,
init,
[
'Argument__1'
]
}
},
{
pid,
<0.20.0>
},
{
registered_name,
[
]
},
{
error_info,
{
exit,
{
error,
badarg
},
[
{
gen_server,
init_it,
6
},
{
proc_lib,
init_p_do_apply,
3
}
]
}
},
{
ancestors,
[
net_sup,
kernel_sup,
<0.9.0>
]
},
{
messages,
[
]
},
{
links,
[
#Port<0.90>,
<0.17.0>
]
},
{
dictionary,
[
{
longnames,
false
}
]
},
{
trap_exit,
true
},
{
status,
running
},
{
heap_size,
610
},
{
stack_size,
24
},
{
reductions,
509
}
],
[
]
]
}{
error_logger,
{
{2012,5,10},
{9,35,43}
},
supervisor_report,
[
{
supervisor,
{
local,
net_sup
}
},
{
errorContext,
start_error
},
{
reason,
{
'EXIT',
nodistribution
}
},
{
offender,
[
{
pid,
undefined
},
{
name,
net_kernel
},
{
mfargs,
{
net_kernel,
start_link,
[
[
rabbitmqprelaunch4868,
shortnames
]
]
}
},
{
restart_type,
permanent
},
{
shutdown,
2000
},
{
child_type,
worker
}
]
}
]
}{
error_logger,
{
{
2012,
5,
10
},
{
9,
35,
43
}
},
supervisor_report,
[
{
supervisor,
{
local,
kernel_sup
}
},
{
errorContext,
start_error
},
{
reason,
shutdown
},
{
offender,
[
{
pid,
undefined
},
{
name,
net_sup
},
{
mfargs,
{
erl_distribution,
start_link,
[
]
}
},
{
restart_type,
permanent
},
{
shutdown,
infinity
},
{
child_type,
supervisor
}
]
}
]
}{
error_logger,
{
{
2012,
5,
10
},
{
9,
35,
43
}
},
std_info,
[
{
application,
kernel
},
{
exited,
{
shutdown,
{
kernel,
start,
[
normal,
[
]
]
}
}
},
{
type,
permanent
}
]
}{
"Kernel pid terminated",
application_controller,
"{application_start_failure,kernel,{shutdown,{kernel,start,[normal,[]]}}}"
}
Anyone got an idea?
The econnrefused at the beginning clearly indicates that something's listening on the same port; netstat -plten could have shown what it was.