{"id":18783391,"url":"https://github.com/futuresea-dev/recognizer-app","last_synced_at":"2026-04-09T02:31:16.203Z","repository":{"id":157980038,"uuid":"403581717","full_name":"futuresea-dev/recognizer-APP","owner":"futuresea-dev","description":"recognizer app","archived":false,"fork":false,"pushed_at":"2021-09-06T10:40:14.000Z","size":33498,"stargazers_count":1,"open_issues_count":0,"forks_count":2,"subscribers_count":3,"default_branch":"main","last_synced_at":"2025-08-01T13:03:48.002Z","etag":null,"topics":["cmake","datetime","detection","docker","gui","image","imageprocessing","java","ocr","ocr-python","ocr-recognition","opencv","opencv-python","pickle","pillow","python","threading","tkinter"],"latest_commit_sha":null,"homepage":"","language":"Python","has_issues":true,"has_wiki":null,"has_pages":null,"mirror_url":null,"source_name":null,"license":"gpl-3.0","status":null,"scm":"git","pull_requests_enabled":true,"icon_url":"https://github.com/futuresea-dev.png","metadata":{"files":{"readme":"README.md","changelog":null,"contributing":"CONTRIBUTING.md","funding":null,"license":"LICENSE","code_of_conduct":null,"threat_model":null,"audit":null,"citation":null,"codeowners":null,"security":null,"support":null,"governance":null,"roadmap":null,"authors":null,"dei":null,"publiccode":null,"codemeta":null}},"created_at":"2021-09-06T10:35:01.000Z","updated_at":"2021-09-10T17:42:21.000Z","dependencies_parsed_at":null,"dependency_job_id":"0cfadae8-27f8-4cab-becf-1730a2b9af11","html_url":"https://github.com/futuresea-dev/recognizer-APP","commit_stats":null,"previous_names":["futurelife365/recognizer-app","futuresea-dev/recognizer-app"],"tags_count":0,"template":false,"template_full_name":null,"purl":"pkg:github/futuresea-dev/recognizer-APP","repository_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/futuresea-dev%2Frecognizer-APP","tags_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/futuresea-dev%2Frecognizer-APP/tags","releases_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/futuresea-dev%2Frecognizer-APP/releases","manifests_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/futuresea-dev%2Frecognizer-APP/manifests","owner_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/owners/futuresea-dev","download_url":"https://codeload.github.com/futuresea-dev/recognizer-APP/tar.gz/refs/heads/main","sbom_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories/futuresea-dev%2Frecognizer-APP/sbom","scorecard":null,"host":{"name":"GitHub","url":"https://github.com","kind":"github","repositories_count":286080680,"owners_count":31582577,"icon_url":"https://github.com/github.png","version":null,"created_at":"2022-05-30T11:31:42.601Z","updated_at":"2026-04-08T14:31:17.711Z","status":"online","status_checked_at":"2026-04-09T02:00:06.848Z","response_time":112,"last_error":null,"robots_txt_status":"success","robots_txt_updated_at":"2025-07-24T06:49:26.215Z","robots_txt_url":"https://github.com/robots.txt","online":true,"can_crawl_api":true,"host_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub","repositories_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repositories","repository_names_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/repository_names","owners_url":"https://repos.ecosyste.ms/api/v1/hosts/GitHub/owners"}},"keywords":["cmake","datetime","detection","docker","gui","image","imageprocessing","java","ocr","ocr-python","ocr-recognition","opencv","opencv-python","pickle","pillow","python","threading","tkinter"],"created_at":"2024-11-07T20:39:13.707Z","updated_at":"2026-04-09T02:31:16.184Z","avatar_url":"https://github.com/futuresea-dev.png","language":"Python","readme":"\u003cdiv align=\"center\"\u003e\n\u003cp\u003e\n\u003ca align=\"left\" href=\"https://ultralytics.com/yolov5\" target=\"_blank\"\u003e\n\u003cimg width=\"850\" src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/splash.jpg\"\u003e\u003c/a\u003e\n\u003c/p\u003e\n\u003cbr\u003e\n\u003cdiv\u003e\n\u003ca href=\"https://github.com/ultralytics/yolov5/actions\"\u003e\u003cimg src=\"https://github.com/ultralytics/yolov5/workflows/CI%20CPU%20testing/badge.svg\" alt=\"CI CPU testing\"\u003e\u003c/a\u003e\n\u003ca href=\"https://zenodo.org/badge/latestdoi/264818686\"\u003e\u003cimg src=\"https://zenodo.org/badge/264818686.svg\" alt=\"YOLOv5 Citation\"\u003e\u003c/a\u003e\n\u003cbr\u003e  \n\u003ca href=\"https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb\"\u003e\u003cimg src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"\u003e\u003c/a\u003e\n\u003ca href=\"https://www.kaggle.com/ultralytics/yolov5\"\u003e\u003cimg src=\"https://kaggle.com/static/images/open-in-kaggle.svg\" alt=\"Open In Kaggle\"\u003e\u003c/a\u003e\n\u003ca href=\"https://hub.docker.com/r/ultralytics/yolov5\"\u003e\u003cimg src=\"https://img.shields.io/docker/pulls/ultralytics/yolov5?logo=docker\" alt=\"Docker Pulls\"\u003e\u003c/a\u003e\n\u003c/div\u003e\n  \u003cbr\u003e\n  \u003cdiv align=\"center\"\u003e\n    \u003ca href=\"https://github.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-github.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"2%\" /\u003e\n    \u003ca href=\"https://www.linkedin.com/company/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-linkedin.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"2%\" /\u003e\n    \u003ca href=\"https://twitter.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-twitter.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"2%\" /\u003e\n    \u003ca href=\"https://youtube.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-youtube.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"2%\" /\u003e\n    \u003ca href=\"https://www.facebook.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-facebook.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"2%\" /\u003e\n    \u003ca href=\"https://www.instagram.com/ultralytics/\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-instagram.png\" width=\"2%\"/\u003e\n    \u003c/a\u003e\n\u003c/div\u003e\n\n\u003cbr\u003e\n\u003cp\u003e\nYOLOv5 🚀 is a family of object detection architectures and models pretrained on the COCO dataset, and represents \u003ca href=\"https://ultralytics.com\"\u003eUltralytics\u003c/a\u003e\n open-source research into future vision AI methods, incorporating lessons learned and best practices evolved over thousands of hours of research and development.\n\u003c/p\u003e\n\n\u003c!-- \n\u003ca align=\"center\" href=\"https://ultralytics.com/yolov5\" target=\"_blank\"\u003e\n\u003cimg width=\"800\" src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/banner-api.png\"\u003e\u003c/a\u003e\n--\u003e\n\n\u003c/div\u003e\n\n## \u003cdiv align=\"center\"\u003eDocumentation\u003c/div\u003e\n\nSee the [YOLOv5 Docs](https://docs.ultralytics.com) for full documentation on training, testing and deployment.\n\n## \u003cdiv align=\"center\"\u003eQuick Start Examples\u003c/div\u003e\n\n\u003cdetails open\u003e\n\u003csummary\u003eInstall\u003c/summary\u003e\n\n[**Python\u003e=3.6.0**](https://www.python.org/) is required with all\n[requirements.txt](https://github.com/ultralytics/yolov5/blob/master/requirements.txt) installed including\n[**PyTorch\u003e=1.7**](https://pytorch.org/get-started/locally/):\n\u003c!-- $ sudo apt update \u0026\u0026 apt install -y libgl1-mesa-glx libsm6 libxext6 libxrender-dev --\u003e\n\n```bash\n$ git clone https://github.com/ultralytics/yolov5\n$ cd yolov5\n$ pip install -r requirements.txt\n```\n\n\u003c/details\u003e\n\n\u003cdetails open\u003e\n\u003csummary\u003eInference\u003c/summary\u003e\n\nInference with YOLOv5 and [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36). Models automatically download\nfrom the [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases).\n\n```python\nimport torch\n\n# Model\nmodel = torch.hub.load('ultralytics/yolov5', 'yolov5s')  # or yolov5m, yolov5l, yolov5x, custom\n\n# Images\nimg = 'https://ultralytics.com/images/zidane.jpg'  # or file, Path, PIL, OpenCV, numpy, list\n\n# Inference\nresults = model(img)\n\n# Results\nresults.print()  # or .show(), .save(), .crop(), .pandas(), etc.\n```\n\n\u003c/details\u003e\n\n\n\n\u003cdetails\u003e\n\u003csummary\u003eInference with detect.py\u003c/summary\u003e\n\n`detect.py` runs inference on a variety of sources, downloading models automatically from\nthe [latest YOLOv5 release](https://github.com/ultralytics/yolov5/releases) and saving results to `runs/detect`.\n\n```bash\n$ python detect.py --source 0  # webcam\n                            file.jpg  # image \n                            file.mp4  # video\n                            path/  # directory\n                            path/*.jpg  # glob\n                            'https://youtu.be/NUsoVlDFqZg'  # YouTube\n                            'rtsp://example.com/media.mp4'  # RTSP, RTMP, HTTP stream\n```\n\n\u003c/details\u003e\n\n\u003cdetails\u003e\n\u003csummary\u003eTraining\u003c/summary\u003e\n\nRun commands below to reproduce results\non [COCO](https://github.com/ultralytics/yolov5/blob/master/data/scripts/get_coco.sh) dataset (dataset auto-downloads on\nfirst use). Training times for YOLOv5s/m/l/x are 2/4/6/8 days on a single V100 (multi-GPU times faster). Use the\nlargest `--batch-size` your GPU allows (batch sizes shown for 16 GB devices).\n\n```bash\n$ python train.py --data coco.yaml --cfg yolov5s.yaml --weights '' --batch-size 64\n                                         yolov5m                                40\n                                         yolov5l                                24\n                                         yolov5x                                16\n```\n\n\u003cimg width=\"800\" src=\"https://user-images.githubusercontent.com/26833433/90222759-949d8800-ddc1-11ea-9fa1-1c97eed2b963.png\"\u003e\n\n\u003c/details\u003e  \n\n\u003cdetails open\u003e\n\u003csummary\u003eTutorials\u003c/summary\u003e\n\n* [Train Custom Data](https://github.com/ultralytics/yolov5/wiki/Train-Custom-Data)\u0026nbsp; 🚀 RECOMMENDED\n* [Tips for Best Training Results](https://github.com/ultralytics/yolov5/wiki/Tips-for-Best-Training-Results)\u0026nbsp; ☘️\n  RECOMMENDED\n* [Weights \u0026 Biases Logging](https://github.com/ultralytics/yolov5/issues/1289)\u0026nbsp; 🌟 NEW\n* [Supervisely Ecosystem](https://github.com/ultralytics/yolov5/issues/2518)\u0026nbsp; 🌟 NEW\n* [Multi-GPU Training](https://github.com/ultralytics/yolov5/issues/475)\n* [PyTorch Hub](https://github.com/ultralytics/yolov5/issues/36)\u0026nbsp; ⭐ NEW\n* [TorchScript, ONNX, CoreML Export](https://github.com/ultralytics/yolov5/issues/251) 🚀\n* [Test-Time Augmentation (TTA)](https://github.com/ultralytics/yolov5/issues/303)\n* [Model Ensembling](https://github.com/ultralytics/yolov5/issues/318)\n* [Model Pruning/Sparsity](https://github.com/ultralytics/yolov5/issues/304)\n* [Hyperparameter Evolution](https://github.com/ultralytics/yolov5/issues/607)\n* [Transfer Learning with Frozen Layers](https://github.com/ultralytics/yolov5/issues/1314)\u0026nbsp; ⭐ NEW\n* [TensorRT Deployment](https://github.com/wang-xinyu/tensorrtx)\n\n\u003c/details\u003e\n\n## \u003cdiv align=\"center\"\u003eEnvironments and Integrations\u003c/div\u003e\n\nGet started in seconds with our verified environments and integrations,\nincluding [Weights \u0026 Biases](https://wandb.ai/site?utm_campaign=repo_yolo_readme) for automatic YOLOv5 experiment\nlogging. Click each icon below for details.\n\n\u003cdiv align=\"center\"\u003e\n    \u003ca href=\"https://colab.research.google.com/github/ultralytics/yolov5/blob/master/tutorial.ipynb\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-colab-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n    \u003ca href=\"https://www.kaggle.com/ultralytics/yolov5\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-kaggle-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n    \u003ca href=\"https://hub.docker.com/r/ultralytics/yolov5\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-docker-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n    \u003ca href=\"https://github.com/ultralytics/yolov5/wiki/AWS-Quickstart\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-aws-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n    \u003ca href=\"https://github.com/ultralytics/yolov5/wiki/GCP-Quickstart\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-gcp-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n    \u003ca href=\"https://wandb.ai/site?utm_campaign=repo_yolo_readme\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-wb-small.png\" width=\"15%\"/\u003e\n    \u003c/a\u003e\n\u003c/div\u003e  \n\n## \u003cdiv align=\"center\"\u003eCompete and Win\u003c/div\u003e\n\nWe are super excited about our first-ever Ultralytics YOLOv5 🚀 EXPORT Competition with **$10,000** in cash prizes!\n\n\u003cp align=\"center\"\u003e\n  \u003ca href=\"https://github.com/ultralytics/yolov5/discussions/3213\"\u003e\n  \u003cimg width=\"850\" src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/banner-export-competition.png\"\u003e\u003c/a\u003e\n\u003c/p\u003e\n\n## \u003cdiv align=\"center\"\u003eWhy YOLOv5\u003c/div\u003e\n\n\u003cp align=\"center\"\u003e\u003cimg width=\"800\" src=\"https://user-images.githubusercontent.com/26833433/114313216-f0a5e100-9af5-11eb-8445-c682b60da2e3.png\"\u003e\u003c/p\u003e\n\u003cdetails\u003e\n  \u003csummary\u003eYOLOv5-P5 640 Figure (click to expand)\u003c/summary\u003e\n\n\u003cp align=\"center\"\u003e\u003cimg width=\"800\" src=\"https://user-images.githubusercontent.com/26833433/114313219-f1d70e00-9af5-11eb-9973-52b1f98d321a.png\"\u003e\u003c/p\u003e\n\u003c/details\u003e\n\u003cdetails\u003e\n  \u003csummary\u003eFigure Notes (click to expand)\u003c/summary\u003e\n\n* GPU Speed measures end-to-end time per image averaged over 5000 COCO val2017 images using a V100 GPU with batch size\n  32, and includes image preprocessing, PyTorch FP16 inference, postprocessing and NMS.\n* EfficientDet data from [google/automl](https://github.com/google/automl) at batch size 8.\n* **Reproduce** by\n  `python val.py --task study --data coco.yaml --iou 0.7 --weights yolov5s6.pt yolov5m6.pt yolov5l6.pt yolov5x6.pt`\n\n\u003c/details\u003e\n\n### Pretrained Checkpoints\n\n[assets]: https://github.com/ultralytics/yolov5/releases\n\n|Model |size\u003cbr\u003e\u003csup\u003e(pixels) |mAP\u003csup\u003eval\u003cbr\u003e0.5:0.95 |mAP\u003csup\u003etest\u003cbr\u003e0.5:0.95 |mAP\u003csup\u003eval\u003cbr\u003e0.5 |Speed\u003cbr\u003e\u003csup\u003eV100 (ms) | |params\u003cbr\u003e\u003csup\u003e(M) |FLOPs\u003cbr\u003e\u003csup\u003e640 (B)\n|---                    |---  |---      |---      |---      |---     |---|---   |---\n|[YOLOv5s][assets]      |640  |36.7     |36.7     |55.4     |**2.0** |   |7.3   |17.0\n|[YOLOv5m][assets]      |640  |44.5     |44.5     |63.1     |2.7     |   |21.4  |51.3\n|[YOLOv5l][assets]      |640  |48.2     |48.2     |66.9     |3.8     |   |47.0  |115.4\n|[YOLOv5x][assets]      |640  |**50.4** |**50.4** |**68.8** |6.1     |   |87.7  |218.8\n|                       |     |         |         |         |        |   |      |\n|[YOLOv5s6][assets]     |1280 |43.3     |43.3     |61.9     |**4.3** |   |12.7  |17.4\n|[YOLOv5m6][assets]     |1280 |50.5     |50.5     |68.7     |8.4     |   |35.9  |52.4\n|[YOLOv5l6][assets]     |1280 |53.4     |53.4     |71.1     |12.3    |   |77.2  |117.7\n|[YOLOv5x6][assets]     |1280 |**54.4** |**54.4** |**72.0** |22.4    |   |141.8 |222.9\n|                       |     |         |         |         |        |   |      |\n|[YOLOv5x6][assets] TTA |1280 |**55.0** |**55.0** |**72.0** |70.8    |   |-     |-\n\n\u003cdetails\u003e\n  \u003csummary\u003eTable Notes (click to expand)\u003c/summary\u003e\n\n* AP\u003csup\u003etest\u003c/sup\u003e denotes COCO [test-dev2017](http://cocodataset.org/#upload) server results, all other AP results\n  denote val2017 accuracy.\n* AP values are for single-model single-scale unless otherwise noted. **Reproduce mAP**\n  by `python val.py --data coco.yaml --img 640 --conf 0.001 --iou 0.65`\n* Speed\u003csub\u003eGPU\u003c/sub\u003e averaged over 5000 COCO val2017 images using a\n  GCP [n1-standard-16](https://cloud.google.com/compute/docs/machine-types#n1_standard_machine_types) V100 instance, and\n  includes FP16 inference, postprocessing and NMS. **Reproduce speed**\n  by `python val.py --data coco.yaml --img 640 --conf 0.25 --iou 0.45 --half`\n* All checkpoints are trained to 300 epochs with default settings and hyperparameters (no autoaugmentation).\n* Test Time Augmentation ([TTA](https://github.com/ultralytics/yolov5/issues/303)) includes reflection and scale\n  augmentation. **Reproduce TTA** by `python val.py --data coco.yaml --img 1536 --iou 0.7 --augment`\n\n\u003c/details\u003e\n\n## \u003cdiv align=\"center\"\u003eContribute\u003c/div\u003e\n\nWe love your input! We want to make contributing to YOLOv5 as easy and transparent as possible. Please see\nour [Contributing Guide](CONTRIBUTING.md) to get started.\n\n## \u003cdiv align=\"center\"\u003eContact\u003c/div\u003e\n\nFor issues running YOLOv5 please visit [GitHub Issues](https://github.com/ultralytics/yolov5/issues). For business or\nprofessional support requests please visit [https://ultralytics.com/contact](https://ultralytics.com/contact).\n\n\u003cbr\u003e\n\n\u003cdiv align=\"center\"\u003e\n    \u003ca href=\"https://github.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-github.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"3%\" /\u003e\n    \u003ca href=\"https://www.linkedin.com/company/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-linkedin.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"3%\" /\u003e\n    \u003ca href=\"https://twitter.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-twitter.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"3%\" /\u003e\n    \u003ca href=\"https://youtube.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-youtube.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"3%\" /\u003e\n    \u003ca href=\"https://www.facebook.com/ultralytics\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-facebook.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n    \u003cimg width=\"3%\" /\u003e\n    \u003ca href=\"https://www.instagram.com/ultralytics/\"\u003e\n        \u003cimg src=\"https://github.com/ultralytics/yolov5/releases/download/v1.0/logo-social-instagram.png\" width=\"3%\"/\u003e\n    \u003c/a\u003e\n\u003c/div\u003e\n","funding_links":[],"categories":[],"sub_categories":[],"project_url":"https://awesome.ecosyste.ms/api/v1/projects/github.com%2Ffuturesea-dev%2Frecognizer-app","html_url":"https://awesome.ecosyste.ms/projects/github.com%2Ffuturesea-dev%2Frecognizer-app","lists_url":"https://awesome.ecosyste.ms/api/v1/projects/github.com%2Ffuturesea-dev%2Frecognizer-app/lists"}