Overview Schematic

Untitled

Facial Recognition

We will store face images, each associated with a user_id, in an S3 bucket. Whenever we want to compare two faces, we send the data from the camera feed to be compared with the existing image in the cloud, which returns the similarity of those faces. With this similarity score we can take further actions, such as flagging suspicious behaviour.

Untitled

Response:

{
    "SourceImageFace": {
        "BoundingBox": {
            "Width": 0.5586434006690979,
            "Height": 0.4251496493816376,
            "Left": 0.12652510404586792,
            "Top": 0.4789287745952606
        },
        "Confidence": 99.99357604980469
    },
    "FaceMatches": [
        {
            "Similarity": 99.99967956542969,
            "Face": {
                "BoundingBox": {
                    "Width": 0.5577617287635803,
                    "Height": 0.4288221597671509,
                    "Left": 0.18876595795154572,
                    "Top": 0.36573100090026855
                },
                "Confidence": 99.99747467041016,
                "Landmarks": [
                    {
                        "Type": "eyeLeft",
                        "X": 0.36277902126312256,
                        "Y": 0.5275181531906128
                    },
                    {
                        "Type": "eyeRight",
                        "X": 0.6142218112945557,
                        "Y": 0.5272752046585083
                    },
                    {
                        "Type": "mouthLeft",
                        "X": 0.3863504230976105,
                        "Y": 0.6814926862716675
                    },
                    {
                        "Type": "mouthRight",
                        "X": 0.5961042046546936,
                        "Y": 0.6811373233795166
                    },
                    {
                        "Type": "nose",
                        "X": 0.5081897377967834,
                        "Y": 0.605715274810791
                    }
                ],
                "Pose": {
                    "Roll": 0.020386720076203346,
                    "Yaw": 6.3583292961120605,
                    "Pitch": 6.764492034912109
                },
                "Quality": {
                    "Brightness": 83.9613265991211,
                    "Sharpness": 86.86019134521484
                }
            }
        }
    ],
    "UnmatchedFaces": []
}

Crowd Counting

With label detection we can find the "Person" items registered in the provided image, then save the result to an S3 bucket for use by the front-end web app.

Untitled

Untitled

This data will then be stored in the database to feed the crowd counting for each specific tube station.

Response:

{
    "Labels": [
        {
            "Name": "Person",
            "Confidence": 99.81456756591797,
            "Instances": [
                {
                    "BoundingBox": {
                        "Width": 0.07794022560119629,
                        "Height": 0.4095735251903534,
                        "Left": 0.3317995071411133,
                        "Top": 0.5786172151565552
                    },
                    "Confidence": 99.81456756591797
                },
                {
                    "BoundingBox": {
                        "Width": 0.07646293938159943,
                        "Height": 0.3649461567401886,
                        "Left": 0.6212862730026245,
                        "Top": 0.6116830706596375
                    },
                    "Confidence": 99.41024780273438
                },
                {
                    "BoundingBox": {
                        "Width": 0.12360916286706924,
                        "Height": 0.36818960309028625,
                        "Left": 0.6504062414169312,
                        "Top": 0.6200152039527893
                    },
                    "Confidence": 99.05335998535156
                },
                {
                    "BoundingBox": {
                        "Width": 0.2075945883989334,
                        "Height": 0.37058085203170776,
                        "Left": 0.11943928152322769,
                        "Top": 0.6123695373535156
                    },
                    "Confidence": 98.29456329345703
                },
                {
                    "BoundingBox": {
                        "Width": 0.1427251398563385,
                        "Height": 0.4253500699996948,
                        "Left": 0.053081970661878586,
                        "Top": 0.5626960396766663
                    },
                    "Confidence": 98.10002136230469
                },
                {
                    "BoundingBox": {
                        "Width": 0.0727170929312706,
                        "Height": 0.3228701949119568,
                        "Left": 0.40400275588035583,
                        "Top": 0.6467138528823853
                    },
                    "Confidence": 97.90043640136719
                },
                {
                    "BoundingBox": {
                        "Width": 0.038916490972042084,
                        "Height": 0.11878227442502975,
                        "Left": 0.4901525378227234,
                        "Top": 0.6192981600761414
                    },
                    "Confidence": 89.21994018554688
                },
                {
                    "BoundingBox": {
                        "Width": 0.07549972832202911,
                        "Height": 0.3127876818180084,
                        "Left": 0.4946352541446686,
                        "Top": 0.6164150238037109
                    },
                    "Confidence": 86.29780578613281
                },
                {
                    "BoundingBox": {
                        "Width": 0.021832847967743874,
                        "Height": 0.07731737196445465,
                        "Left": 0.5681344270706177,
                        "Top": 0.6272901892662048
                    },
                    "Confidence": 79.6637191772461
                }
            ],
            "Parents": []
        },
        {
            "Name": "Human",
            "Confidence": 99.81456756591797,
            "Instances": [],
            "Parents": []
        },
        {
            "Name": "Pedestrian",
            "Confidence": 90.71568298339844,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                }
            ]
        },
        {
            "Name": "Clothing",
            "Confidence": 89.9275131225586,
            "Instances": [],
            "Parents": []
        },
        {
            "Name": "Apparel",
            "Confidence": 89.9275131225586,
            "Instances": [],
            "Parents": []
        },
        {
            "Name": "Car",
            "Confidence": 85.23780822753906,
            "Instances": [
                {
                    "BoundingBox": {
                        "Width": 0.9881044626235962,
                        "Height": 0.998305082321167,
                        "Left": 0.0029286860954016447,
                        "Top": 0
                    },
                    "Confidence": 85.23780822753906
                }
            ],
            "Parents": [
                {
                    "Name": "Vehicle"
                },
                {
                    "Name": "Transportation"
                }
            ]
        },
        {
            "Name": "Automobile",
            "Confidence": 85.23780822753906,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Vehicle"
                },
                {
                    "Name": "Transportation"
                }
            ]
        },
        {
            "Name": "Transportation",
            "Confidence": 85.23780822753906,
            "Instances": [],
            "Parents": []
        },
        {
            "Name": "Vehicle",
            "Confidence": 85.23780822753906,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Transportation"
                }
            ]
        },
        {
            "Name": "People",
            "Confidence": 80.5057144165039,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                }
            ]
        },
        {
            "Name": "Crowd",
            "Confidence": 73.47538757324219,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                }
            ]
        },
        {
            "Name": "Photography",
            "Confidence": 60.25484848022461,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                }
            ]
        },
        {
            "Name": "Photo",
            "Confidence": 60.25484848022461,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                }
            ]
        },
        {
            "Name": "Pants",
            "Confidence": 57.360443115234375,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Clothing"
                }
            ]
        },
        {
            "Name": "Coat",
            "Confidence": 56.86234664916992,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Clothing"
                }
            ]
        },
        {
            "Name": "Musician",
            "Confidence": 55.12335968017578,
            "Instances": [],
            "Parents": [
                {
                    "Name": "Person"
                },
                {
                    "Name": "Musical Instrument"
                }
            ]
        },
        {
            "Name": "Musical Instrument",
            "Confidence": 55.12335968017578,
            "Instances": [],
            "Parents": []
        }
    ],
    "LabelModelVersion": "2.0"
}